Dataset columns:

  column           type    lengths / classes
  ---------------  ------  -------------------
  query            string  length 9 to 9.05k
  document         string  length 10 to 222k
  metadata         dict    -
  negatives        list    30 items
  negative_scores  list    30 items
  document_score   string  length 4 to 10
  document_rank    string  2 classes
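For orientation, a minimal sketch of loading the dataset and inspecting one row with the Hugging Face datasets library; the repository id "user/code-retrieval-triplets" is a placeholder assumption, not the dataset's real name:

    from datasets import load_dataset

    # Placeholder repository id -- substitute the actual dataset name.
    ds = load_dataset("user/code-retrieval-triplets", split="train")

    row = ds[0]
    print(row["query"])             # natural-language docstring used as the anchor
    print(row["document"][:200])    # the positive code snippet
    print(len(row["negatives"]))    # 30 hard-negative code snippets
    print(row["negative_scores"][:3], row["document_score"], row["document_rank"])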
Load and validate a config file, returning any errors encountered. If the config file is valid, the tuple returned contains the loaded config as the first element, and the second element is None. Otherwise, the second element is an iterable of errors that occurred during validation.
def check_config(
    config_file: Path,
) -> Tuple[Optional[Dict[str, Any]], Optional[Iterable[errors.ValidationError]]]:
    load_data = parse_config(config_file)
    load_data, load_data_dup = tee(load_data)
    first = next(load_data_dup)
    if isinstance(first, errors.ValidationError):
        validation_errors = cast(
            Iterable[errors.ValidationError],
            filter(lambda v: isinstance(v, errors.ValidationError), load_data),
        )
        return (None, validation_errors)
    else:
        return (first, None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_config(\n config_file: Path,\n) -> Iterable[Union[errors.ValidationError, Dict[str, Any]]]:\n # root = config_file.parent\n config = load_any(config_file)\n schema_errors: Iterable[errors.SchemaValidationError] = (\n errors.SchemaValidationError(str(e), e)\n for e in config_schema_validator.iter_errors(config)\n )\n # Make a duplicate to check whethere there are errors returned\n schema_errors, schema_errors_dup = tee(schema_errors)\n # This is the same test as used in Validator.is_valid\n if next(schema_errors_dup, None) is not None:\n yield from schema_errors\n yield config", "def load_config(config_file: Path) -> Dict[str, Any]:\n config, errors = check_config(config_file)\n if errors is not None:\n raise next(iter(errors))\n # errors is None\n assert config is not None\n return config", "def validate_config_file(self, config_file):\n config_dict = ConfigObj(config_file, configspec=CFG_SPEC.split('\\n'))\n result = config_dict.validate(Validator(), copy=True, preserve_errors=True)\n if result != True:\n msg = 'Config file validation failed: %s' % result\n raise Exception(msg)\n return config_dict", "def _parseConfigFile(self):\n\n configFile = self._configFile()\n\n configs = configparser.SafeConfigParser()\n try:\n with open(configFile, 'r', encoding='utf-8') as fh:\n try:\n configs.readfp(fh)\n return configs\n except configparser.Error:\n log(ERROR, traceback.format_exc())\n return None\n except IOError:\n log(DEBUG, \"Error: Could not read from config file {0}\\n\".format(configFile))\n return None", "def _validate(self, config):\n validator = Validator()\n try:\n results = config.validate(validator, preserve_errors=True)\n except ConfigObjError as e:\n raise ConfigError(e.message)\n if results is not True:\n error_msg = \"\"\n for (section_list, key, res) in flatten_errors(config, results):\n if key is not None:\n if res is False:\n msg = 'key \"%s\" in section \"%s\" is missing.'\n msg = msg % (key, \", \".join(section_list))\n else:\n msg = 'key \"%s\" in section \"%s\" failed validation: %s'\n msg = msg % (key, \", \".join(section_list), res)\n else:\n msg = 'section \"%s\" is missing' % \".\".join(section_list)\n error_msg += msg + \"\\n\"\n raise ConfigError(error_msg)\n return config", "def settings_validate(ctx):\n path = ctx.obj['load_path']\n if not path:\n _raise_settings_not_found()\n with open(path) as handle:\n config_dict = json.load(handle)\n try:\n config.validate_config(config_dict)\n except exceptions.ConfigValidationError as err:\n raise click.ClickException(\n '{} is invalid: '.format(path) + err.message\n ) from err", "def validate_config(config: Dict) -> None:\n\n # For validating with pydantic\n config_flattened = format_config_for_validation(config)\n user_defined_sets = get_all_sets(config)\n\n errors = []\n for input_data in config_flattened:\n try:\n if \"type\" not in input_data:\n UserDefinedValue(**input_data)\n elif input_data[\"type\"] == \"param\":\n input_data[\"defined_sets\"] = user_defined_sets\n UserDefinedParameter(**input_data)\n elif input_data[\"type\"] == \"result\":\n input_data[\"defined_sets\"] = user_defined_sets\n UserDefinedResult(**input_data)\n elif input_data[\"type\"] == \"set\":\n UserDefinedSet(**input_data)\n else:\n # have pydantic raise an error\n UserDefinedValue(\n name=input_data[\"name\"],\n type=input_data[\"type\"],\n dtype=input_data[\"dtype\"],\n )\n except ValidationError as ex:\n errors_caught = [x[\"msg\"] for x in ex.errors()]\n errors.extend(errors_caught)\n\n if errors:\n error_message = 
\"\\n\".join(errors)\n raise OtooleConfigFileError(message=f\"\\n{error_message}\")", "def load_and_exit(config_file_name=\"network_importer.toml\", config_data=None):\n try:\n load(config_file_name=config_file_name, config_data=config_data)\n except ValidationError as err:\n print(f\"Configuration not valid, found {len(err.errors())} error(s)\")\n for error in err.errors():\n print(f\" {'/'.join(error['loc'])} | {error['msg']} ({error['type']})\")\n sys.exit(1)\n except ConfigLoadFatalError as err:\n print(\"Configuration not valid\")\n print(f\" {err}\")\n sys.exit(1)", "def get_valid_config(args):\n source = confuse.YamlSource(args.config)\n config = confuse.RootView([source])\n\n job_template = {\n \"job\": {\n \"name\": str,\n \"dir\": confuse.Optional(\n FilenameValidate(\n cwd=str(pathlib.Path(__file__).parent.absolute())),\n default=str(pathlib.Path(__file__).parent.absolute())\n ),\n }\n }\n job_config = config.get(job_template)\n\n logging_template = confuse.Optional(\n confuse.MappingTemplate({\n 'ids': confuse.StrSeq(),\n 'data': confuse.Sequence(\n confuse.Choice(['objectives', 'state', 'variables'])),\n 'timestamped': confuse.Optional(bool, default=True),\n \"to_file\": confuse.Optional(bool, default=True),\n \"to_console\": confuse.Optional(bool, default=False)\n })\n )\n\n sumo_template = {\n \"dir\": FilenameValidate(\n cwd=job_config.job.dir),\n \"gui\": confuse.Optional(bool, default=True),\n \"max_steps\": confuse.Optional(int, default=10e5),\n \"network\": FilenameValidate(relative_to=\"dir\"),\n }\n sumo_config = config.get({\"sumo\": sumo_template})\n sumo_template[\"additional\"] = confuse.Sequence(\n FilenameValidate(cwd=sumo_config.sumo.dir))\n sumo_template[\"route\"] = confuse.Sequence(\n FilenameValidate(cwd=sumo_config.sumo.dir))\n\n tls_template = confuse.Sequence({\n \"id\": str,\n \"controller\": confuse.Choice(\n TLSFactory.get_registered_keys()),\n \"constants\": confuse.MappingValues(\n confuse.OneOf([\n confuse.Number(),\n AllowedContainers(list),\n AllowedContainers(dict),\n FilenameValidate(cwd=job_config.job.dir),\n ExecutableValidate()\n ])\n ),\n \"variables\": confuse.MappingValues(\n confuse.OneOf([\n confuse.Number(),\n AllowedContainers(list)\n ])\n ),\n \"extract\": {\n \"user_data\": confuse.Sequence({\n \"feature\": confuse.Choice(\n [\"count\", \"speed\", \"eta\", \"delay\", \"waiting_time\"]),\n \"user_class\": confuse.Choice(\n [\"bicycle\", \"passenger\", \"pedestrian\", \"bus\", \"truck\", \"moped\"]),\n \"at\": confuse.Choice(\n [\"lane\", \"detector\", \"phase\"]),\n \"mapping\": AllowedContainers(dict)\n }),\n \"tls_data\": confuse.Sequence({\n \"feature\": confuse.Choice(\n [\"elapsed_time\", \"integer_phase\", \"binary_phase\"]),\n \"to_variable\": str\n })\n }\n })\n\n full_template = {\n \"logging\": logging_template,\n \"sumo\": sumo_template,\n \"tls\": tls_template,\n }\n job_template.update(full_template)\n valid_config = config.get(job_template)\n\n # second round of sumo validation\n assert len(valid_config.sumo.route) > 0, \\\n \"No demand definition: sumo.route is an empty list, expected at least one *.rou.xml\"\n \n # second round of logger validation, look if ids are given\n if valid_config.logging:\n if valid_config.logging.ids and valid_config.logging.data:\n output_dir = os.path.join(valid_config.job.dir, \"output\")\n os.makedirs(output_dir, exist_ok=True)\n valid_config.logging.update({\"dir\": output_dir})\n else:\n del valid_config['logging']\n\n return valid_config", "def read_config(filepath):\n try:\n return 
load_from_file(filepath)\n except (ValueError, yaml.reader.ReaderError, yaml.parser.ParserError) as ex:\n raise Failure(\"Config parsing error: \"+str(ex), 3, ex)", "def validate_config(self):\n reference = data_file(\"../config/template/minimum_aiscalator.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"In Global Application Configuration file \"\n _validate_configs(self._app_conf, ref, msg,\n missing_exception=True,\n type_mismatch_exception=True)\n reference = data_file(\"../config/template/aiscalator.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"In Global Application Configuration file \"\n _validate_configs(self._app_conf, ref, msg,\n missing_exception=False,\n type_mismatch_exception=True)\n if self._step_name:\n reference = data_file(\"../config/template/minimum_step.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in step named \" + self._step_name\n _validate_configs(self._step,\n ref[\"steps\"][\"Untitled\"],\n msg,\n missing_exception=True,\n type_mismatch_exception=True)\n reference = data_file(\"../config/template/step.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in step named \" + self._step_name\n _validate_configs(self._step,\n ref[\"steps\"][\"Untitled\"],\n msg,\n missing_exception=False,\n type_mismatch_exception=True)\n if self._dag_name:\n reference = data_file(\"../config/template/minimum_dag.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in dag named \" + self._dag_name\n _validate_configs(self._dag,\n ref[\"dags\"][\"Untitled\"],\n msg,\n missing_exception=True,\n type_mismatch_exception=True)\n reference = data_file(\"../config/template/step.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in dag named \" + self._dag_name\n _validate_configs(self._dag,\n ref[\"dags\"][\"Untitled\"],\n msg,\n missing_exception=False,\n type_mismatch_exception=True)", "def __check_configuration__(self, parser):\n if not parser.has_section('core'):\n self.logger.error('The config file should contain a core section with at least the module_path specified')\n sys.exit(1)\n\n else:\n if parser.get('core', 'modules_path', fallback=None) is None:\n self.logger.error('The configuration file should contain at least the modules_path value in core section.')\n sys.exit(1)\n\n if not parser.has_section('mysql'):\n self.logger.error('The config file should contain a mysql section.')\n sys.exit(1)\n\n else:\n if parser.get('mysql', 'host', fallback=None) is None:\n self.logger.error('The config file should contain the host value in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'port', fallback=None) is None:\n self.logger.error('The config file should contain the port value in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'user', fallback=None) is None:\n self.logger.error('The config file should contain the user in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'password', fallback=None) is None:\n self.logger.error('The config file should contain the password of the user in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'server_id', fallback=None) is None:\n self.logger.error('The config file should contain the server_id in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'tables', fallback=None) is not None:\n tables = [table.strip() for table in parser.get('mysql', 'tables').split(',')]\n for table in tables:\n if not parser.has_section(table):\n self.logger.error('The config file should contain a section 
about the table : %s' % table)\n exit(1)\n if parser.get(table, 'index_label', fallback=None) is None :\n self.logger.error('The config file should contain a table section with a index_label value.')\n exit(1)\n else:\n self.logger.error('The config file should contain a tables value with all the tables to replicate.')\n exit(1)", "def validate_config(config):\n # check if paths are valid\n check_paths = {\n 'data_path': r'data$',\n 'master_list_path': r'master_list\\.csv$',\n 'duplicate_list_path': r'duplicate_list\\.csv$',\n 'log_path': r'data[\\\\\\/]jobfunnel.log$',\n 'filter_list_path': r'data[\\\\\\/]filter_list\\.json$',\n }\n\n for path, pattern in check_paths.items():\n if not re.search(pattern, config[path]):\n raise ConfigError(path)\n # check if the provider list only consists of supported providers\n if not set(config['providers']).issubset(PROVIDERS):\n raise ConfigError('providers')\n\n # check validity of region settings\n validate_region(config['search_terms']['region'])\n\n # check validity of delay settings\n validate_delay(config['delay_config'])\n\n # check the validity of max_listing_days settings\n if(config['max_listing_days'] is not None and config['max_listing_days'] < 0):\n raise ConfigError('max_listing_days')", "def validate_config(self):\n pass", "def validate_config(self):\n pass", "def get_validated_config(filename: str = CONFIG_ENV) -> dict:\n if filename == CONFIG_DEFAULT:\n logger.warning(\n \"Config filename environment variable LAUNCHPAD_CFG not set, \"\n \"using default file: %s\",\n repr(CONFIG_DEFAULT),\n )\n logger.info(\"Loading configuration file %s...\", filename)\n\n with open(filename, encoding=\"utf-8\") as f:\n return get_validated_config_str(f)", "def _validate_config(self, conf: Dict[str, Any]) -> Dict[str, Any]:\n try:\n validate(conf, constant.CONF_SCHEMA, Draft4Validator)\n return conf\n except ValidationError as exception:\n logger.critical(\n 'Invalid configuration. See config.json.example. 
Reason: %s',\n exception\n )\n raise ValidationError(\n best_match(Draft4Validator(constant.CONF_SCHEMA).iter_errors(conf)).message\n )", "def _validate_config(self):\n pass", "def validate_configuration_file(self):\n\n with open(self.config_path, \"r+\") as f_config:\n return bool(re.search(get_configuration_file_re(),\n f_config.read()))", "def test_validate_config_file(self):\n ingest_mgmr = IngestManager()\n ingest_mgmr.validate_config_file(self.example_config_data)\n assert(ingest_mgmr.config is not None)\n assert (ingest_mgmr.config.config_data is not None)", "def validate_config(self):\r\n c = self.config\r\n \r\n # Make sure that we have a database_path, and an image_path...\r\n assert 'database_path' in c\r\n assert 'image_path' in c\r\n # We should probably check if these paths exist and make them as well...\r\n \r\n # Set the default values.\r\n graph_draw_frequency = c['graph_draw_frequency']\r\n for period, interval in self.default_config['graph_draw_frequency'].iteritems():\r\n graph_draw_frequency.setdefault(period, interval)\r\n \r\n # A quick check to make sure that our port is an integer.\r\n c['httpd_port'] = int(c['httpd_port'])\r\n \r\n # Make sure that no duplicate IDs exist, and that the template exists as well.\r\n ids = set()\r\n for graph in c['graphs']:\r\n graph.setdefault('config', {})\r\n graph['config'].setdefault('periods', [])\r\n assert graph['id'] not in ids\r\n ids.add(graph['id'])\r\n assert(template_exists(graph['template']))", "def _validate_configurations(self) -> None:\n if self.__exception:\n raise self.__exception", "def check_config_file(config_file_name, log_full_filename, log_file_path):\n try:\n if not os.path.isfile(config_file_name):\n # file not exist\n append_log_info(\"Config file not exists\", log_full_filename, log_file_path)\n return []\n else:\n try:\n config_content = []\n with open(config_file_name) as f:\n for line in f:\n comment = False\n try:\n if line.strip()[0] == \"#\":\n comment = True\n except:\n pass\n if not comment:\n line_split = line.split()\n if len(line_split) >= 5:\n config_content.append(line.split())\n return config_content\n except:\n append_log_info(\"Error file opening: \" + config_file_name, log_full_filename, log_file_path)\n return []\n except:\n append_log_info(\"Generic error on config file: \" + config_file_name, log_full_filename, log_file_path)\n return []", "def _validate_pf_config(module, config):\n rc, out, err = module.run_command(['ls', config])\n\n # Fail if no config file is present\n if rc != 0:\n msg_err = f'Error: Config file does not exist: {config}'\n module.fail_json(msg=msg_err)", "def _validate_against_schema(config):\n logging.info(\"Validating config file against the schema\")\n try:\n c = Core(source_data=config, schema_files=[CONFIG_SCHEMA])\n c.validate(raise_exception=True)\n except Exception as e:\n logging.error(\"Failed when validating schema: %s\", e)\n logging.info(\"Dumping rendered template:\\n%s\", dump_rendered_config_file(config))\n raise", "def parse_config(path):\n configuration = load_config(path)\n validate_config(configuration)\n return configuration", "def show_validation_error(\n file_path: Optional[Union[str, Path]] = None,\n *,\n title: Optional[str] = None,\n desc: str = \"\",\n show_config: Optional[bool] = None,\n hint_fill: bool = True,\n):\n try:\n yield\n except ConfigValidationError as e:\n title = title if title is not None else e.title\n if e.desc:\n desc = f\"{e.desc}\" if not desc else f\"{e.desc}\\n\\n{desc}\"\n # Re-generate a new error object with 
overrides\n err = e.from_error(e, title=\"\", desc=desc, show_config=show_config)\n msg.fail(title)\n print(err.text.strip())\n if hint_fill and \"value_error.missing\" in err.error_types:\n config_path = (\n file_path\n if file_path is not None and str(file_path) != \"-\"\n else \"config.cfg\"\n )\n msg.text(\n \"If your config contains missing values, you can run the 'init \"\n \"fill-config' command to fill in all the defaults, if possible:\",\n spaced=True,\n )\n print(f\"{COMMAND} init fill-config {config_path} {config_path} \\n\")\n sys.exit(1)\n except InterpolationError as e:\n msg.fail(\"Config validation error\", e, exits=1)", "def test_read_valid_configs(self):\n args = argparse.Namespace(server=None, force=False)\n with open(self._config) as config_f:\n with open(self._auth) as auth_config_f:\n (config_data, auth_tuple) = imageroller.main.read_configs(\n args,\n config_f,\n auth_config_f)\n self.assertEqual(config_data.concurrent_workers,\n CONFIG_DATA[\"ConcurrentWorkers\"])\n self.assertEqual(len(config_data.server_data), 1)\n self.assertTupleEqual(auth_tuple, (AUTH_DATA[\"ApiUser\"],\n AUTH_DATA[\"ApiKey\"]))", "def config_validate(ctx, **kwargs):\n # Validates pf9-express config file and obtains Auth Token\n #Load Active Config into ctx\n GetConfig(ctx).GetActiveConfig()\n #Get Token\n token = GetToken().get_token_v3(\n ctx.params[\"du_url\"],\n ctx.params[\"du_username\"],\n ctx.params[\"du_password\"],\n ctx.params[\"du_tenant\"] )\n if token is not None:\n click.echo('Config Validated!')\n click.echo('Token: %s' % token)\n else:\n click.echo('Config Validation Failed!')", "def _get_config_parsser(config_file):\n config_parser = Parser()\n # read() return a list of file name that read success\n open_success_file = config_parser.read(config_file)\n if len(open_success_file) == 0:\n return 1, \"can not open file: \" + config_file\n else:\n return 0, config_parser" ]
[ "0.73096585", "0.68011", "0.6606892", "0.638414", "0.6333747", "0.61956245", "0.6187002", "0.60878813", "0.6080821", "0.60753626", "0.60023487", "0.6002032", "0.5985557", "0.5974979", "0.5974979", "0.59661037", "0.5951037", "0.5942092", "0.59041005", "0.59037936", "0.5863309", "0.5843285", "0.58409756", "0.5829713", "0.58213145", "0.5798009", "0.57778144", "0.5764811", "0.57500845", "0.5749586" ]
0.795852
0
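The objective metadata marks each row as a (query, document, negatives) triplet. A minimal sketch, assuming sentence-transformers-style triplet training (an assumption, not something the dataset prescribes), of expanding one such row into (anchor, positive, negative) training examples; row is a dict shaped like the record above:

    from sentence_transformers import InputExample

    def row_to_examples(row):
        # Fan the anchor/positive pair out against each of the 30 hard negatives.
        return [
            InputExample(texts=[row["query"], row["document"], negative])
            for negative in row["negatives"]
        ]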
Loads a config file, or throws an exception if it is not valid.
def load_config(config_file: Path) -> Dict[str, Any]:
    config, errors = check_config(config_file)
    if errors is not None:
        raise next(iter(errors))
    # errors is None
    assert config is not None
    return config
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cli_load_config(self, args) -> str:\n path = args.config_path\n if not os.path.isfile(path):\n return error(\"Path {} DNE\".format(path))\n\n try:\n self.config = config.from_file(path)\n return ok(\"Configuration loaded from {}\".format(path))\n except FileNotFoundError as err:\n return error(\"Could not load file: {}\".format(err))\n except json.JSONDecodeError as json_err:\n return error(\"Could not parse json file {}\".format(json_err))", "def load(file):\n _config.load(file)", "def load_config(path):\n # opens config file\n try:\n config = configparser.ConfigParser()\n config.read(path)\n return config\n except Exception as e:\n print(\"Error loading config file: \", e)\n sys.exit(1)", "def test_load_configuration_fails_gracefully_when_file_does_not_exist():\n config.load_configuration(invalid_configuration_path, graceful=True)\n assert True", "def load_config_file(self):\n\n conf_file = config.DEFAULT_CONFIGURATION_FILE\n\n if self.options and getattr(self.options, \"conf_file\"):\n conf_file = self.options.conf_file\n if (\n not os.path.exists(conf_file) and\n not os.path.exists(\"%s.d\" % conf_file)\n ):\n raise Exception(\n (\n \"The specified configuration file \"\n \"does not exist. File=(%s)\"\n ) % self.options.conf_file\n )\n\n self.from_file(conf_file)", "def load():\n # get (or create) config path\n p = initialize()\n return load_config(open(p['config']))", "def load(self, config: str) -> None:\n if not os.path.exists(config):\n raise RuntimeError(f\"Input configuration file {config} does not exist.\")", "def load_config(config=CONFIGFILE):\n\ttry:\n\t\tcfg = open(config, 'r').read()\n\t\treturn sanitize_config(json.loads(cfg))\n\texcept IOError as e:\n\t\t(errno, errstr) = e.args\n\t\tif errno == 2:\n\t\t\treturn DEFAULTCONFIG\n\t\telse:\n\t\t\tprint(\"An error occured opening the configuration file '%s':\"%(config),\n\t\t\t\t\tfile=stderr)\n\t\t\tprint(errstr, file=stderr)\n\t\t\texit(1)\n\texcept ValueError as e:\n\t\tif e.args[0] == 'No JSON object could be decoded':\n\t\t\tprint(\"An error occured reading the configuration file '%s'; please \\\nmake sure it is correct JSON\"%\n\t\t\t\t(config), file=stderr)\n\t\telse:\n\t\t\tprint(e)\n\t\texit(1)", "def test_load_configuration_raises_an_exception_when_file_does_not_exist():\n with pytest.raises(FileNotFoundError):\n config.load_configuration(invalid_configuration_path)", "def _load_config(file):\n try:\n return bb.parse.handle(os.path.join('conf', file), bb.data.init() )\n except IOError, e:\n return None", "def loadConfig(self, config_file):\r\n\r\n import json\r\n\r\n self.config = None\r\n\r\n try:\r\n with open(config_file) as f:\r\n self.config = json.load(f)\r\n except OSError as err:\r\n print(\"Unable to process {}, {}\".format(config_file, err))\r\n sys.exit(1)", "def read_config(filepath):\n try:\n return load_from_file(filepath)\n except (ValueError, yaml.reader.ReaderError, yaml.parser.ParserError) as ex:\n raise Failure(\"Config parsing error: \"+str(ex), 3, ex)", "def load_config(config_file=None):\n if Config.CONFIG:\n return Config.CONFIG\n else:\n try:\n if not config_file:\n config_file = os.path.join('./config/', 'bcl2fastq.config.yaml')\n Config.CONFIG = Config._load_yaml_config(config_file)\n return Config.CONFIG\n except IOError:\n raise IOError((\"There was a problem loading the configuration file. 
\"\n \"Please make sure that {0} exists and that you have \"\n \"read permissions\".format(config_file)))", "def _load_config_file(self, path: str) -> Dict[str, Any]:\n try:\n with open(path) as file:\n conf = json.load(file)\n except FileNotFoundError:\n raise OperationalException(\n f'Config file \"{path}\" not found!'\n ' Please create a config file or check whether it exists.')\n\n return conf", "def config_init(filename='./config.yml'):\n try:\n cfg = yaml.safe_load(open(filename))\n except IOError:\n msg = f'Loading config file ({filename}) failed.'\n raise IOError(msg)\n return cfg", "def load_config(filename):\n filepaths = []\n for dirpath in os.path.expanduser('~'), os.curdir, '':\n try:\n filepath = os.path.join(dirpath, filename)\n filepaths.append(filepath)\n with open(filepath, 'r') as f:\n return Config(yaml.safe_load(f))\n except IOError:\n pass\n raise IOError('Configuration file not found: ' + ', '.join(filepaths))", "def load_config_file(file: str) -> json:\n with open(file) as config:\n try:\n return json.load(config)\n except json.decoder.JSONDecodeError as err:\n raise Exception(\n f\"Couldn't load {config}: it is formatted incorrectly \"\n f\"on line {err.lineno} column {err.colno}\"\n ) from err", "def read_config(self, config_filename):", "def _load_config():\n fname = _get_config_fname()\n if fname is None or not op.isfile(fname):\n return dict()\n with open(fname, 'r') as fid:\n config = json.load(fid)\n return config", "def load_config():\n config = configparser.ConfigParser()\n config.read('config.ini')\n return config", "def load_config():\n config = ConfigParser()\n config.read(os.path.join(os.path.dirname(__file__), 'config.ini'))\n return config", "def test_loads_a_config_file(self):\n from test.resources import config\n self.assertIsInstance(config, type(sys))\n self.assertIsNotNone(config.example)\n self.assertEqual(config.example.config_option, 'config-value')", "def parse(self):\n try:\n with open(self.path, 'r') as ymlfile:\n self.__cfg = yaml.load(ymlfile)\n except IOError:\n self.log(\"File {0} not found -- aborting\".format(self.path))\n raise ConfigFileException", "def read_config():\n config = configparser.ConfigParser()\n if not os.path.exists(\"config.cfg\"):\n raise FileNotFoundError(\"configuration file (config.cfg) not found!\")\n config.read(\"config.cfg\")\n return config", "def load_config(config_file):\n try:\n with open('settings.json', 'r') as f:\n return json.loads(f.read())\n except (IOError, Exception) as e:\n print '%s' % e\n exit()", "def load_config(filename):\n with open(filename, \"r\") as stream:\n try:\n global CONFIG\n CONFIG = yaml.load(stream)\n except yaml.YAMLError as ex:\n print(ex)", "def load_config(self):\r\n with open('config.json', 'r') as f:\r\n self.config = json.load(f)", "def init_config(cls, path):\n try:\n config_string = open(path).read()\n except EnvironmentError as ex:\n LOGGER.error('Could not load %s file, error: %s', path, ex)\n sys.exit()\n\n try:\n cls.config = json.loads(config_string)\n except ValueError as ex:\n LOGGER.error(' %s file is not valid json, error: %s', path, ex)\n sys.exit()", "def load_config(self):\n if not self.config_file_path:\n return False\n with open(self.config_file_path) as f:\n self.config = yaml.load(f)\n return True", "def load_config(self, filename):\n # check if config file exists\n if not os.path.exists(filename):\n raise Exception(\"Can't find configuration {}.\".format(filename))\n # load config file\n config = configparser.ConfigParser()\n with open(filename, 'r') as 
config_file:\n config.read_file(config_file)\n return config" ]
[ "0.7601103", "0.74409306", "0.7427742", "0.73445207", "0.7344369", "0.73436534", "0.7332355", "0.7284374", "0.7253979", "0.72053397", "0.7204084", "0.7203225", "0.71599305", "0.7146686", "0.7126887", "0.7115745", "0.71139956", "0.7109619", "0.7101705", "0.7047698", "0.70289546", "0.7027252", "0.6978851", "0.6949398", "0.6947451", "0.6946608", "0.694087", "0.6929468", "0.6911559", "0.68876994" ]
0.76747537
0
Enter SwitcherBridge asynchronous context manager.
async def __aenter__(self) -> "SwitcherBridge":
    await self.start()
    return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def activate():\n refresh()\n activate_connection_with_mainloop(get_uuid())", "async def __aenter__(self):\n await self.start()", "async def __aenter__(self) -> 'BaseAgent':\n\n return await self.open()", "def switch():\n return Awaitable()", "def stasis_start_cb(channel, ev):\n await channel.answer()\n await bridge.addChannel(channel=channel.id)", "async def __aenter__(self):\n if not self._active:\n await self._setup()\n self._active = True\n self._entered += 1\n return self", "async def __aenter__(self):\n await self.connect()\n return self", "def main_thread_enter(self):\n ...", "async def __aenter__(self) -> \"HomeAssistantClient\":\n await self.connect()\n return self", "def __enter__(self):\n self.start()\n return self", "def __enter__(self):\n self.start()\n return self", "def switch(self, context):\n return", "async def start(self):", "async def start(self):", "def start(self):\n if self._start_event is None:\n _call_spawn_callbacks(self)\n hub = get_my_hub(self) # pylint:disable=undefined-variable\n self._start_event = hub.loop.run_callback(self.switch)", "async def start(self) -> None:", "def __enter__(self):\n self.connect()\n return self", "async def start_session(self):\n\t\t...", "def _handler_acquiring_status_enter(self):\n self._async_raise_fsm_event(ProtocolEvent.ACQUIRE_STATUS_ASYNC)", "def __enter__(self):\n self.boot()\n self.process_resources()\n self.run()\n return self", "def __enter__(self):\n self._logger.debug(\"__enter__()\")\n self.install(\"PRE\")", "def enter(self):\n log.debug(\"Entering context creator for GetDoer\")\n if not self.node.isRunning():\n conf = configs.get(self.port)\n self.node = get_node(self.port, **conf)", "def state_wait_enter(cfg, app, win):", "def activate(self):\n logging.info(\"{0} activated\".format(self.name))\n self.to_foreground()\n while self.in_foreground: #All the work is done in input callbacks\n self.idle_loop()\n return_value = self.get_return_value()\n logging.info(\"{} exited\".format(self.name))\n return return_value", "async def on_start(self):", "async def startup(self):", "async def startup(self):", "async def run(self) -> None:\n await self._mqtt.connect()\n LOGGER.info(\"Connected to MQTT Broker\")\n\n async with ClientSession() as websession:\n try:\n await self._setup_bridge(websession)\n except aiohue.errors.Unauthorized:\n LOGGER.error(\"Bridge rejected username. Please use --discover\")\n self.halt()\n return\n await self._publish_bridge_status()\n await self.main(websession)\n\n LOGGER.info(\"Disconnecting from MQTT Broker\")\n await self._publish_bridge_status(online=False)\n await self._mqtt.disconnect()", "def activate(self):\n logging.info(\"{0} activated\".format(self.name))\n self.to_foreground()\n while self.in_background: #All the work is done in input callbacks\n self.idle_loop()\n return_value = self.get_return_value()\n logging.info(\"{} exited\".format(self.name))\n return return_value", "def _async_start(self, _: HomeAssistant | None = None) -> None:\n self._reset_tracked_state()\n self._async_start_tracking()\n self.async_write_ha_state()" ]
[ "0.6332844", "0.6325681", "0.6321533", "0.6101696", "0.57948446", "0.5786901", "0.5741127", "0.5712348", "0.5669127", "0.5625606", "0.5625606", "0.562332", "0.55336386", "0.55336386", "0.55250365", "0.5493556", "0.5477489", "0.5465742", "0.54586947", "0.54450357", "0.5440988", "0.5433411", "0.5422235", "0.53863496", "0.5382602", "0.53768545", "0.53768545", "0.5356165", "0.535588", "0.5348553" ]
0.7491586
0
Extract the IP address from the type1 broadcast message (Heater, Plug).
def get_ip_type1(self) -> str:
    hex_ip = hexlify(self.message)[152:160]
    ip_addr = int(hex_ip[6:8] + hex_ip[4:6] + hex_ip[2:4] + hex_ip[0:2], 16)
    return inet_ntoa(pack("<L", ip_addr))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_ip_type2(self) -> str:\n hex_ip = hexlify(self.message)[154:162]\n ip_addr = int(hex_ip[0:2] + hex_ip[2:4] + hex_ip[4:6] + hex_ip[6:8], 16)\n return inet_ntoa(pack(\">L\", ip_addr))", "def ip_for_event (event):\n eth = dpid_to_str(event.dpid,True).split(\"|\")[0].replace(\"-\",\":\")\n return EthAddr(eth)", "def get_ip_address2(ifname):\n try:\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n return socket.inet_ntoa(fcntl.ioctl(\n s.fileno(),\n 0x8915, # SIOCGIFADDR\n struct.pack('256s', ifname[:15])\n )[20:24])\n except:\n return None", "def get_ip_string():\n return netifaces.ifaddresses('br0')[netifaces.AF_INET][0]['addr']", "def discoveryBroadcast(self):\n interfaces = netifaces.interfaces()\n for interface in interfaces:\n addrlist = netifaces.ifaddresses(interface)[netifaces.AF_INET]\n for addr in addrlist:\n if \"addr\" in addr and \"broadcast\" in addr:\n self.discoverysocket.sendto(str.encode(json.dumps({\"ip\": addr[\"addr\"], \"port\": self.tcpPort, \"host\": socket.gethostname()})), (addr[\"broadcast\"], 31338))", "def ipAddress():\n \n sk = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n sk.connect((\"8.8.8.8\", 80))\n ip = (sk.getsockname()[0])\n sk.close()\n return str(ip)", "def get_interface_broadcast_addresses(self):\n broadcast_addresses = []\n ip_interfaces = self.get_interfaces_ip()\n for k, v in ip_interfaces.items():\n if 'ipv4' in v:\n ipv4_address_info = ip_interfaces[k]['ipv4']\n ip_addresses = ipv4_address_info.keys()\n for ip_address in ip_addresses:\n netmask = ip_interfaces[k]['ipv4'][ip_address]['prefix_length']\n ipv4_address = ipaddress.ip_interface(\"{}/{}\".format(ip_address, netmask))\n network = ipv4_address.network\n broadcast_addresses.append(str(network.broadcast_address))\n if 'ipv6' in v:\n ipv4_address_info = ip_interfaces[k]['ipv6']\n ip_addresses = ipv4_address_info.keys()\n for ip_address in ip_addresses:\n netmask = ip_interfaces[k]['ipv6'][ip_address]['prefix_length']\n ipv4_address = ipaddress.ip_interface(\"{}/{}\".format(ip_address, netmask))\n network = ipv4_address.network\n broadcast_addresses.append(str(network.broadcast_address))\n return broadcast_addresses", "def getData(self):\n if len(self.data)==2+4:\n return (socket.AF_INET,socket.inet_ntop(socket.AF_INET,self.data[2:]))\n elif len(self.data)==2+16:\n return (socket.AF_INET6,socket.inet_ntop(socket.AF_INET6,self.data[2:]))\n else:\n raise InvalidAddressTypeException(self)", "def discover(self, srv_port):\n addr = None\n answ = None\n\n # Creates a new datagram socket to broadcast\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)\n s.settimeout(self.timeout)\n s.sendto(REQ_HELLO, ('255.255.255.255', srv_port))\n\n # Wait for a server answer\n try:\n answ = s.recvfrom(1024)\n except socket.timeout:\n print 'Timeout exceeded...'\n\n # Close the diagram socket.\n s.close()\n\n if answ is not None and answ[0] == ANS_HELLO:\n # Saves the address if the server answer was correct.\n addr = answ[1]\n return addr", "def get_broadcast_addresses():\n addr_list = []\n if HAS_NETIFACES:\n for iface in netifaces.interfaces():\n addresses = netifaces.ifaddresses(iface).get(netifaces.AF_INET)\n if addresses is None:\n continue\n for address in addresses:\n broadcast_addr = address.get(\"broadcast\")\n if broadcast_addr is None:\n continue\n addr_list.append(broadcast_addr)\n return [\"127.0.0.1\", \"255.255.255.255\", \"<broadcast>\"] + addr_list", "def get_IPaddress():\n config = get_ifconfig()\n return 
config[0]", "def get_net_message():\n # TODO: refactor to use a list of events encoded using masgpack?\n try:\n message, address = serverSocket.recvfrom(1024)\n except:\n return None, None\n message = message.decode('utf-8')\n return message, address", "def decode(self,buf):\n eth = dpkt.ethernet.Ethernet(buf)\n pkt_len = len(buf)\n if(eth.type== dpkt.ethernet.ETH_TYPE_IP):\n ip = eth.data\n dst_ip = socket.inet_ntoa(ip.dst)\n src_ip = socket.inet_ntoa(ip.src)\n octet_list = string.split(dst_ip,'.')\n broadcast = False\n for o in octet_list:\n if (o == \"255\"):\n broadcast = True\n break\n if((octet_list[0] == \"224\") or (octet_list[0] == \"239\")):\n broadcast = True #Its multicast actually.\n if not broadcast:\n if(ip.p == dpkt.ip.IP_PROTO_TCP):\n pass\n elif(ip.p == dpkt.ip.IP_PROTO_UDP):\n udp =ip.data\n if((udp.dport == 53) or (udp.sport == 53)): # A request. \n if(udp.dport == 53): # A request. \n return self.dns_handler.handle_dns_request(ip.src,ip.dst,ip.p,udp.sport,udp.dport,udp.data)\n if(udp.sport == 53): # A DNS response\n self.dns_handler.handle_dns_response(ip.src,ip.dst,ip.p,udp.sport,udp.dport,udp.data)\n else:\n pass", "def getip(self):\n if configIpAddress == \"none\":\n strngtoXmit = 'M-SEARCH * HTTP/1.1' + '\\r\\n' + \\\n 'HOST: 239.255.255.250:1900' + '\\r\\n' + \\\n 'MAN: \"ssdp:discover\"' + '\\r\\n' + \\\n 'MX: 2' + '\\r\\n' + \\\n 'ST: urn:schemas-upnp-org:device:MediaRenderer:1' + '\\r\\n' + '\\r\\n'\n\n bytestoXmit = strngtoXmit.encode()\n sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n sock.settimeout(3)\n gotstr = 'notyet'\n found = False\n ipaddress = None\n sock.sendto(bytestoXmit, ('239.255.255.250', 1900))\n try:\n gotbytes, addressport = sock.recvfrom(512)\n gotstr = gotbytes.decode()\n except:\n sock.sendto(bytestoXmit, ('239.255.255.250', 1900))\n if re.search('LG', gotstr):\n ipaddress, _ = addressport\n found = True\n self._state = STATE_PLAYING\n else:\n gotstr = 'notyet'\n sock.close()\n if not found:\n print(\"LG TV not found\")\n ipaddress = None\n self._state = STATE_OFF\n lgtv[\"ipaddress\"] = ipaddress\n else:\n lgtv[\"ipaddress\"] = configIpAddress\n if self.isOnline():\n self._state = STATE_PLAYING\n else:\n self._state = STATE_OFF", "def get_ip(ifn):\n sck = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n return socket.inet_ntoa(\n fcntl.ioctl(sck.fileno(), 0x8915, struct.pack(\"256s\", ifn[:15]))[20:24]\n )", "def ip_addr(self):\n return self.ip_addresses[0]", "def get_self_ip():\n\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect(('8.8.8.8', 80))\n ip = s.getsockname()\n s.close()\n return ip[0]", "def address_1(self):\n return self._address_1", "def ip():\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect((\"8.8.8.8\", 80))\n ip = s.getsockname()[0]\n s.close()\n return ip", "def ip(self, mess, args):\n return '%s\\n' % urlgrabber.urlread('http://whatismyip.org')", "def get_ip_address(ifname):\n # I did not write this function I give credit to this site\n # for it:\n # hpython-mysqldbttp://code.activestate.com/recipes/439094/\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, # SIOCGIFADDR\n struct.pack('256s', ifname[:15])\n )[20:24])", "def _parse_inet(line):\n tokens = line.split()\n return netaddr.IPNetwork(tokens[1])", "def _guess_lan_address():\n blacklist = [\"127.0.0.1\", \"0.0.0.0\", \"255.255.255.255\"]\n for interface in netifaces.interfaces():\n addresses = netifaces.ifaddresses(interface)\n for option in 
addresses.get(netifaces.AF_INET, []):\n if \"broadcast\" in option and \"addr\" in option and not option[\"addr\"] in blacklist:\n if __debug__: dprint(\"interface \", interface, \" address \", option[\"addr\"])\n return option[\"addr\"]\n #Exception for virtual machines/containers\n for interface in netifaces.interfaces():\n addresses = netifaces.ifaddresses(interface)\n for option in addresses.get(netifaces.AF_INET, []):\n if \"addr\" in option and not option[\"addr\"] in blacklist:\n if __debug__: dprint(\"interface \", interface, \" address \", option[\"addr\"])\n return option[\"addr\"]\n dprint(\"Unable to find our public interface!\", level=\"error\")\n return None", "def local_ip(self) -> Optional[str]:\n if not self._send_parse_reply(b\"AT+IPADDR\", b\"+IPADDR:\"):\n return None\n return self._buf", "def extract(self, str):\n\n ips = re.match( r'^[0-9]+(?:\\.[0-9]+){3}', str)\n\n if ips:\n return ips.group(0)", "def get_interface_ip_address(peer=\"8.8.8.8\"):\n\n sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n sock.connect((peer, 1))\n interface_ip_address = sock.getsockname()[0]\n sock.close()\n\n return interface_ip_address", "def detect_ip_address():\n # Rather hackish way to get the local ip-address, recipy from\n # https://stackoverflow.com/a/166589\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect((\"8.8.8.8\", 80))\n ip_address = s.getsockname()[0]\n s.close()\n return ip_address", "def get_ip():\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n try:\n # doesn't even have to be reachable\n s.connect(('10.255.255.255', 1))\n ip = s.getsockname()[0]\n except Exception:\n ip = '127.0.0.1'\n finally:\n s.close()\n return ip", "def handle_ip(bot, ievent):\n try:\n item = ievent.args[0]\n except IndexError:\n ievent.missing('<hostname>')\n return\n try:\n ipnr = socket.gethostbyname(item)\n ievent.reply(ipnr)\n except:\n ievent.reply(\"can't match \" + str(item))", "def getMyIP():\r\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n s.connect(('8.8.8.8', 1)) # connect() for UDP doesn't send packets\r\n return s.getsockname()[0]" ]
[ "0.62573063", "0.5983005", "0.59109694", "0.5889796", "0.5855139", "0.5791712", "0.5750377", "0.57353413", "0.57214594", "0.5695647", "0.5687465", "0.5656045", "0.56231207", "0.5519996", "0.5519413", "0.5513121", "0.54990864", "0.5491366", "0.5478653", "0.5467143", "0.54567075", "0.54170424", "0.54157126", "0.54108644", "0.54072654", "0.539839", "0.53969806", "0.5390709", "0.5386357", "0.5386204" ]
0.70770574
0
Extract the device id from the broadcast message.
def get_device_id(self) -> str:
    return hexlify(self.message)[36:42].decode()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_device_id_from_event(event):\n return event.message.annotations[\"iothub-connection-device-id\".encode()].decode()", "def device_id(self):\n return self._id[0]", "def device_id(self) -> str:\n return self._device_info[\"ID\"]", "def device_id(self):\n return self._annotations.get(EventData.PROP_DEVICE_ID, None)", "def get_device_id(self):\n\n if self.have_metadata is False:\n self._get_metadata()\n self.have_metadata = True\n\n\t\ttry:\n\t\t\treturn self.keyinfo['tracking_id'].attrs['device_id']\n\t\texcept:\n\t\t\treturn None", "def device_id(self):\n data = fcntl.ioctl(self._fd, _EVIOCGID, '\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00')\n idbus, idvendor, idproduct, idversion = struct.unpack(\"hhhh\", data)\n return idbus, idvendor, idproduct, idversion", "def device_id(self) -> Optional[str]:\n return self.relay(\"device_id\")", "def get_device_id(self) -> str:\n return Config.get('device_id')", "def device_id(self):\n return self.unique_id", "def device_id(self):\n return self._device_id", "def _get_device_id_from_registered(api) -> str:\n\n try:\n api.oauth_login(\"bad\")\n except InvalidDeviceId as original_exception:\n error_message = original_exception.args[0]\n\n device_ids_str = error_message.split(\"Your valid device IDs are:\")[-1]\n device_ids = device_ids_str.split(\"\\n\")\n device_ids = [device_id.replace(\"* \", \"\") for device_id in device_ids]\n return device_ids[-1]", "def get_device_id(group_id: int, identifier: str) -> int:\n return Murmur3().get_device_id(group_id, identifier)", "def _get_device_id(api: Mobileclient) -> str:\n\n try:\n _get_device_id_from_environment()\n except KeyError:\n pass\n\n return _get_device_id_from_registered(api)", "def get_device_id_by_name(self, device_name):\n\n return self.get_device_by_name(device_name).id", "def msg_id(self):\n return struct.unpack('<H', self.pkt.payload[0:2])[0]", "def _get_vendor_id(device_dict):\n return device_dict['vendor_id'].split()[0].split('x')[-1]", "def unique_id(self):\n return self._deviceId", "def _get_product_id(device_dict):\n return device_dict['product_id'].split('x')[-1]", "def unique_id(self):\n return self._device_id", "def get_id(self):\n try:\n return self.inst.query('*IDN?')[:36]\n except errors.VisaIOError as e:\n logger.warning(e)\n return 'Device not connected.'", "def _get_device_id_from_environment() -> str:\n\n return os.environ[\"GOOGLE_MUSIC_DEVICE_ID\"]", "def unique_id(self):\n return self.device_id", "def getMessageID(self):\n return self._payload[1]", "def get_device_id(self, device_index):\n return self.drt_manager.get_id_from_index(device_index)", "def map_device_to_id(dev_map, device):\n for elem in dev_map:\n if device == elem['path']:\n return elem['id']", "def message_id(self) -> str:\n return self[\"Sns\"][\"MessageId\"]", "def unique_id(self) -> Optional[str]:\n return self._device.device_id", "def getDeviceID(self, unitCode=0):\n resp = self.XAPCommand('DID', unitCode=unitCode)\n return int(resp)", "def device_session_identifier(self):\n return self._device_session_identifier", "def get_device_id(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)" ]
[ "0.70864534", "0.66141975", "0.6504654", "0.64821935", "0.6376667", "0.6334644", "0.6326233", "0.62108934", "0.6189589", "0.6149342", "0.6094864", "0.6069741", "0.6030198", "0.59717447", "0.5965991", "0.59284145", "0.58982235", "0.5707103", "0.5660862", "0.56576896", "0.56511986", "0.56510496", "0.56193125", "0.56087166", "0.5451222", "0.53966844", "0.53857696", "0.53801924", "0.5339044", "0.533857" ]
0.71858644
0
Extract the device state from the broadcast message.
def get_device_state(self) -> DeviceState:
    hex_device_state = hexlify(self.message)[266:268].decode()
    return (
        DeviceState.ON if hex_device_state == DeviceState.ON.value else DeviceState.OFF
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_device_state(self, device_name):\n device_info = self.data.get(device_name)\n return device_info.get('state') if device_info else None", "def device_state_attributes(self):\n return {\"uuid\": self.uuidAction,\n \"room\": self.room,\n \"category\": self.cat,\n \"device_typ\": self.type,\n \"plattform\": \"loxone\"}", "def device_state_attributes(self):\n return {\"uuid\": self.uuidAction, \"room\": self.room,\n \"category\": self.cat,\n \"device_typ\": self.type, \"plattform\": \"loxone\"}", "def state(self):\n return self.coordinator.data[METER_DEVICE_TYPE][self.base_unique_id][METER_STATE]", "def get_state(self):\n ret = self.send(\"?S\", recv=True)\n assert ret in \"WDR\"\n return ret", "def device_state_attributes(self):\n return self._hass.data[DATA_UPCOMING]", "def device_state_attributes(self):\n return {\n 'addr': self._device.addr,\n 'ieee': self._device.ieee,\n 'endpoint': '0x{:02x}'.format(self._endpoint),\n }", "def device_state_attributes(self):\n # Move these to Thermostat Device and make them global\n return {\n \"current_humidity\": self._current_humidity,\n \"status\": self._current_state,\n \"program\": self._current_program,\n \"away_mode\": self._away\n }", "def state(self):\n return self.device.device_data[self.device_id][self._sensor_type]", "def _read_device_state():\n \n try:\n _debug_print(\"Connecting to bus...\")\n i2c_bus = smbus.SMBus(_bus_id)\n\n current_state = i2c_bus.read_byte(_device_addr) & 0x0F\n\n return int(current_state)\n\n except:\n print(\"Error: There was a problem reading from the device\")\n # Best to re-raise as we can't recover from this\n raise", "def state(self):\n return self.coordinator.data[PVS_DEVICE_TYPE][self.base_unique_id][PVS_STATE]", "def device_state_attributes(self):\n return {\n ATTR_ATTRIBUTION: ATTRIBUTION,\n ATTR_LAST_UPDATE: self.metoffice_now.date if self.metoffice_now else None,\n ATTR_SENSOR_ID: self._type,\n ATTR_SITE_ID: self.metoffice_site_id if self.metoffice_site_id else None,\n ATTR_SITE_NAME: self.metoffice_site_name\n if self.metoffice_site_name\n else None,\n }", "def broadcast():\n # global receiving_message\n # if not receiving_message:\n router.broadcast(clients.copy(), json.dumps(current_state))", "def device_state_attributes(self):\n tmp = self._boiler.__dict__.items()\n return {'status': {k: v for k, v in tmp if k in GH_STATE_ATTRS}}", "def device_state_attributes(self):\n if self._data is not None:\n return {\n \"阳历\": self._data.yangli,\n \"阴历\": self._data.yinli,\n \"五行\": self._data.wuxing,\n \"冲煞\": self._data.chongsha,\n \"百忌\": self._data.baiji,\n \"吉神\": self._data.jishen,\n \"宜\": self._data.yi,\n \"凶神\": self._data.xiongshen,\n \"忌\": self._data.ji,\n }", "def get_state(self):\n self.request_state()\n e = self.get_event()\n if e.id != ID_STATE:\n raise GrblEventError(e)\n return e.data", "def device_state_attributes(self):\n return self._device.status", "def __read_device(self):\n state = XinputState()\n res = self.manager.xinput.XInputGetState(\n self.__device_number, ctypes.byref(state))\n if res == XINPUT_ERROR_SUCCESS:\n return state\n if res != XINPUT_ERROR_DEVICE_NOT_CONNECTED:\n raise RuntimeError(\n \"Unknown error %d attempting to get state of device %d\" % (\n res, self.__device_number))\n # else (device is not connected)\n return None", "def get_device_state(self):\n\t\treturn call_sdk_function('PrlSrvCfgDev_GetDeviceState', self.handle)", "def device_state_attributes(self):\n return {\"uuid\": self.uuidAction, \"room\": self.room,\n \"category\": self.cat,\n 
\"selected_scene\": self.effect,\n \"device_typ\": self.type, \"plattform\": \"loxone\"}", "def state(self):\n return self.coordinator.data[INVERTER_DEVICE_TYPE][self.base_unique_id][INVERTER_STATE]", "def state(self):\n return self.device.value()", "def process_broadcast(data):\n logger.info(f\"Broadcast: {data}\")", "def state(self):\n return self._device.value", "def device_state_attributes(self) -> Dict[str, any]:\n return self._device.state_attributes", "def device_state_attributes(self):\n return {\n \"attribution\": ATTRIBUTION,\n \"id\": str(self.coordinator.data.get(\"id\")),\n \"integration\": DOMAIN,\n }", "def state(self):\n return self.msg.state", "def device_state_attributes(self):\r\n return self.attributes", "def state(self):\n return self.device.status(station=self.station_number)", "def device_state_attributes(self): # Can be remove from 0.99\n return self._attr" ]
[ "0.59893084", "0.57019615", "0.56932735", "0.56579196", "0.5648592", "0.5616866", "0.555145", "0.5544498", "0.5493315", "0.54372776", "0.5397859", "0.53942513", "0.53653306", "0.53638494", "0.536375", "0.5355793", "0.5349352", "0.53458685", "0.53340346", "0.5317107", "0.5308376", "0.5292879", "0.52855563", "0.5273639", "0.5232657", "0.5226602", "0.5216926", "0.5201488", "0.5192331", "0.5187878" ]
0.65081036
0
Extract the auto shutdown value from the broadcast message.
def get_auto_shutdown(self) -> str:
    hex_auto_shutdown_val = hexlify(self.message)[310:318]
    int_auto_shutdown_val_secs = int(
        hex_auto_shutdown_val[6:8]
        + hex_auto_shutdown_val[4:6]
        + hex_auto_shutdown_val[2:4]
        + hex_auto_shutdown_val[0:2],
        16,
    )
    return seconds_to_iso_time(int_auto_shutdown_val_secs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_heartbeat_message(self):\n return self.messages[\"heartbeat\"].get()", "def shutdown(self):\n return self._read(MX_SHUTDOWN)", "def pull(self):\n \n data = self.s.recv(1024)\n if data:\n info = json.loads(data.decode()) \n print(\"DATA FROM BROKER : \", info)\n \n return info.get(\"topic\"), info.get(\"value\")\n pass", "def pull(self):\n \n data = self.s.recv(1024)\n if data:\n info = pickle.loads(data) \n \n return info.get(\"topic\"), info.get(\"value\")\n pass", "def payload_off(self):\n \n return \"OFF\"", "def get_bootvar(self):\n module = 'bootimage/oper'\n method = 'GET'\n response = self.axapi_call(module, method)\n bootdefault = response.json()['bootimage']['oper']['hd-default']\n print(self.device + ' The device is set to boot from: ' + bootdefault + ' in the future')\n return bootdefault", "def getShutdownFlag(self):\n return self._shutdownFlag", "def shutdown(self):\n return self.packet().close().send()", "def scale_down_unready_time(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"scale_down_unready_time\")", "def shutdown_on_idle(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"shutdown_on_idle\")", "def shutdown_on_idle(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"shutdown_on_idle\")", "def get_auto_stop(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetAutoStop', self.handle)", "def get_last_ack(self):\n\t\treturn self.last_ack", "def sc_auto_unsuspend_delay(self):\n return self._sc_auto_unsuspend_delay", "def value(self) -> Optional[Sequence['outputs.LiveEventStreamEventResponse']]:\n return pulumi.get(self, \"value\")", "def get_soft_shutdown_settle_down_duration(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def sleep(self, value):\n return self._i2c_write(_SHUTDOWN_REGISTER, not value, bank=_CONFIG_BANK)", "def GetForegroundValue(self) -> \"short\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_GetForegroundValue(self)", "def get_humidity(self):\n return self._sense_hat.get_humidity()", "def GetForegroundValue(self) -> \"short\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_GetForegroundValue(self)", "def GetForegroundValue(self) -> \"short\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_GetForegroundValue(self)", "def GetForegroundValue(self) -> \"unsigned short\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_GetForegroundValue(self)", "def shutdown(self, value):\n self._write(MX_SHUTDOWN, value)", "def GetForegroundValue(self) -> \"unsigned short\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_GetForegroundValue(self)", "def get_discovery_message(self):\n return self.messages[\"discovery\"].get()", "def GetForegroundValue(self) -> \"unsigned char\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_GetForegroundValue(self)", "def shutdown_mosfet(self) -> str:\r\n return self.send_command(self._MOSFET_SHUTDOWN)", "def getAutoSmoothEventValue(*args, **kwargs):\n pass", "def getSTOP(self):\n return self.listener.STOP", "def GetForegroundValue(self) -> \"unsigned short\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_GetForegroundValue(self)" ]
[ "0.5499507", "0.5274515", "0.49070346", "0.4894385", "0.48835716", "0.48745817", "0.48557547", "0.48136777", "0.47831786", "0.4762295", "0.4762295", "0.47481573", "0.4739972", "0.47129744", "0.47009182", "0.46813038", "0.46596712", "0.46524695", "0.46379837", "0.462631", "0.46253997", "0.4621361", "0.46122542", "0.46061873", "0.46057466", "0.46007857", "0.45978308", "0.4593072", "0.45888332", "0.45780715" ]
0.6372114
0
Extract the power consumption from the broadcast message.
def get_power_consumption(self) -> int:
    hex_power_consumption = hexlify(self.message)[270:278]
    return int(hex_power_consumption[2:4] + hex_power_consumption[0:2], 16)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_power(self, channel):\n\n power = self.device.query(f':POW{channel}:VAL?')\n return float(power)", "def get_power_values(self):\n x=self.send_packet_check_response('\\x60')\n return struct.unpack('<iiih',x)", "def get_power(self):\r\n _debug('simq03b_api.get_power')\r\n \r\n x = self.query('POWer?')\r\n if x == None: return None\r\n return float(x)", "def get_power(self):\r\n return self._api.get_power()", "def get_power(self) -> float:\n\n #:READ[n][:CHANnel[m]][:SCALar]: POWer[:DC]?\n return float(self._inst.query(\":READ:POW?\"))", "def read_power(self):\n return(self.power)", "def _response_power_buffer(self, message):\n if message.logaddr.value == self._last_log_address:\n self._last_log_collected = True\n # Collect logged power usage\n for i in range(1, 5):\n if getattr(message, \"logdate%d\" % (i,)).value != None:\n dt = getattr(message, \"logdate%d\" % (i,)).value\n if getattr(message, \"pulses%d\" % (i,)).value == 0:\n self.power_history[dt] = 0.0\n else:\n self.power_history[dt] = self.pulses_to_kWs(\n getattr(message, \"pulses%d\" % (i,)).value, 3600\n )\n # Cleanup history for more than 2 day's ago\n if len(self.power_history.keys()) > 48:\n for dt in list(self.power_history.keys()):\n if (dt + self.stick.timezone_delta - timedelta(hours=1)).date() < (\n datetime.now().today().date() - timedelta(days=1)\n ):\n del self.power_history[dt]\n # Recalculate power use counters\n last_hour_usage = 0\n today_power = 0\n yesterday_power = 0\n for dt in self.power_history:\n if (dt + self.stick.timezone_delta) == datetime.now().today().replace(\n minute=0, second=0, microsecond=0\n ):\n last_hour_usage = self.power_history[dt]\n if (\n dt + self.stick.timezone_delta - timedelta(hours=1)\n ).date() == datetime.now().today().date():\n today_power += self.power_history[dt]\n if (dt + self.stick.timezone_delta - timedelta(hours=1)).date() == (\n datetime.now().today().date() - timedelta(days=1)\n ):\n yesterday_power += self.power_history[dt]\n if self.power_consumption_prev_hour != last_hour_usage:\n self.power_consumption_prev_hour = last_hour_usage\n self.do_callback(SENSOR_POWER_CONSUMPTION_PREVIOUS_HOUR[\"id\"])\n if self.power_consumption_today != today_power:\n self.power_consumption_today = today_power\n self.do_callback(SENSOR_POWER_CONSUMPTION_TODAY[\"id\"])\n if self.power_consumption_yesterday != yesterday_power:\n self.power_consumption_yesterday = yesterday_power\n self.do_callback(SENSOR_POWER_CONSUMPTION_YESTERDAY[\"id\"])", "def get_power(self):\r\n x = self.query('SOURce1:POWer:POWer?')\r\n if x == None: return None\r\n return float(x)", "def get_power(self):\r\n x = self.query('SOURce1:POWer:POWer?')\r\n if x == None: return None\r\n return float(x)", "def get_power_management() -> int:", "def get_power_usage(self):\n if self.pulses_1s is None:\n return None\n return self.pulses_to_kWs(self.pulses_1s) * 1000", "def PM_getPower(self,channel,unit='W'):\n if unit not in ApexAP1000.PM_UNIT:\n raise ValueError('Unknow physical unit during power measurement')\n if channel not in ApexAP1000.PM_CHANNELS:\n raise ValueError('Unknow channel during power measurement')\n str = {'W':'MW','mW':'MW','dBm':'DBM'}\n value = float(self.ask(self.headStr('PM')+'%s[%d]?'%(str[unit],channel)))\n if unit is 'W':\n value = value * 1e-3\n return value", "def get_power(self):\r\n return self.p", "def _get_multicastWeight(self):\n return self.__multicastWeight", "async def get_power(self):\n if not self._current_power_supported:\n return 0\n\n try:\n value = await 
self._get_config(STATE_POWER_V1)\n return value[STATE_POWER_V1]\n except (ValueError, InvalidRequestError):\n # Device does not support whole unit instant power usage\n self._current_power_supported = False\n return 0", "def calculate_signal_power(self, sender, freq_range):\r\n distance = np.sqrt(\r\n np.power(self.x - sender.x, 2) + np.power(self.y - sender.y, 2))\r\n avg_frequency = np.average(freq_range) * 1e6\r\n wavelength = settings.speed_of_light / avg_frequency\r\n received_signal_power = (\r\n sender.tx_power * sender.gain * self.gain * np.power(\r\n wavelength, 2)) / np.power(4 * np.pi * distance, 2)\r\n return received_signal_power", "def get_power(self):\r\n x = self.query('POW?')\r\n if x == None: return None\r\n return float(x)", "def measurePower(self, avenum=5):\n if 'avenum' in self.parameters:\n avenum = int(self.parameters['avenum'])\n counts = self.com.command('GET PWR', avenum, nreply=2)\n return [((c-self.atcPedestal) * self.adcCountTomV - self.voltageShift) * self.mVTodBm + self.powerShift for c in counts]", "def get_runtime(self):\n summary = \" \".join(self.get_summary().split())\n pattern = '\\$.... .. .*? .*? .*? .*? .*? . .*? (.*?) . . . .*?'\n runtime = re.findall(pattern,summary).pop() \n hms = runtime.split(':')\n msg = 'Battery has been enabled for {}h, {}m, and {}s.'\n print(msg.format(hms[0],hms[1],hms[2]))\n return hms", "def get_power():\n return float(cmd(\"pa?\"))", "def power(self) -> int:\n return self._power_consumption", "def read_measurement(self):\n return self.execute(SdpI2cCmdReadMeasurement())", "def ParseSamplingOutput(powermonitor_output):\n power_samples = []\n total_energy_consumption_mwh = 0\n def ParseSample(sample):\n values = [float(x) for x in sample.split(' ')]\n res = {}\n (res['timestamp_s'],\n res['charge_nah'],\n res['current_ua'],\n res['voltage_uv']) = values\n return res\n # The output contains a sample per line.\n samples = map(ParseSample, powermonitor_output.split('\\n')[:-1])\n # Keep track of the last sample that found an updated reading.\n last_updated_sample = samples[0]\n # Compute average voltage.\n voltage_sum_uv = 0\n voltage_count = 0\n for sample in samples:\n if sample['charge_nah'] != last_updated_sample['charge_nah']:\n charge_difference_nah = (sample['charge_nah'] -\n last_updated_sample['charge_nah'])\n # Use average voltage for the energy consumption.\n voltage_sum_uv += sample['voltage_uv']\n voltage_count += 1\n average_voltage_uv = voltage_sum_uv / voltage_count\n total_energy_consumption_mwh += (-charge_difference_nah *\n average_voltage_uv / 10 ** 12)\n last_updated_sample = sample\n voltage_sum_uv = 0\n voltage_count = 0\n # Update average voltage.\n voltage_sum_uv += sample['voltage_uv']\n voltage_count += 1\n # Compute energy of the sample.\n energy_consumption_mw = (-sample['current_ua'] * sample['voltage_uv'] /\n 10 ** 9)\n\n power_samples.append(energy_consumption_mw)\n # Because the data is stalled for a few seconds, compute the remaining\n # energy consumption using the last available current reading.\n last_sample = samples[-1]\n remaining_time_h = (\n last_sample['timestamp_s'] - last_updated_sample['timestamp_s']) / 3600\n average_voltage_uv = voltage_sum_uv / voltage_count\n\n remaining_energy_consumption_mwh = (-last_updated_sample['current_ua'] *\n average_voltage_uv *\n remaining_time_h / 10 ** 9)\n total_energy_consumption_mwh += remaining_energy_consumption_mwh\n\n # -------- Collect and Process Data -------------\n out_dict = {}\n # Raw power usage samples.\n out_dict['identifier'] = 
'ds2784'\n out_dict['power_samples_mw'] = power_samples\n out_dict['energy_consumption_mwh'] = total_energy_consumption_mwh\n\n return out_dict", "def process_wifi_com(self, wm):\n print wm.message", "def _extract_charge(self, event):\n\n # copy the waveform be cause we do not want to change it for the moment\n waveforms = np.copy(event.r1.tel[self.tel_id].waveform)\n\n # pedestal event do not have gain selection\n no_gain_selection = np.zeros((waveforms.shape[0], waveforms.shape[1]), dtype=np.int64)\n no_gain_selection[1] = 1\n n_pixels = 1855\n\n # correct the r1 waveform for the sampling time corrections\n if self.time_sampling_corrector:\n waveforms *= (self.time_sampling_corrector.get_corrections(event, self.tel_id)\n [no_gain_selection, np.arange(n_pixels)])\n\n # Extract charge and time\n charge = 0\n peak_pos = 0\n if self.extractor:\n broken_pixels = event.mon.tel[self.tel_id].pixel_status.hardware_failing_pixels\n dl1 = self.extractor(waveforms, self.tel_id, no_gain_selection, broken_pixels=broken_pixels)\n charge = dl1.image\n peak_pos = dl1.peak_time\n\n return charge, peak_pos", "def get_energy(self):\n return self.bot_client.send_command(_Command.GetEnergy)", "def current_power_w(self):\n if self._devtype == \"pod\":\n return self._current_consumption\n return False", "def _get_bpm_from_soundstretch(output):\n \n output = output.split(\"\\n\")\n for line in output:\n if 'Detected BPM rate ' in line:\n bpm = line[18:]\n return float(bpm)\n return None # Could not parse output", "def read_wpm_counter(self):\n return self.WPM_COUNTER", "def getEnergyAdded(self):\n return self.json_state.get(\"charging\").get(\"wh_energy\")" ]
[ "0.6030356", "0.60249543", "0.5813906", "0.5777191", "0.5746", "0.5744297", "0.5712377", "0.57049614", "0.57049614", "0.5549503", "0.5493961", "0.54691863", "0.54381156", "0.53966916", "0.5382752", "0.5351965", "0.53316444", "0.5309858", "0.5301612", "0.52832156", "0.5234166", "0.5160572", "0.5123244", "0.5119546", "0.5116882", "0.5108819", "0.5046086", "0.5037366", "0.5029367", "0.5017415" ]
0.6581233
0
Extract the device type from the broadcast message.
def get_device_type(self) -> DeviceType:
    hex_model = hexlify(self.message[74:76]).decode()
    devices = dict(map(lambda d: (d.hex_rep, d), DeviceType))
    return devices[hex_model]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def device_type(self):\n return Context.devtype2str[self.device_typeid]", "def device_type(self):\n return Context.devtype2str[self.device_typeid]", "def device_type(self):\n return self._meta['device_type']", "def device_type(self):\n # type: () -> string_types\n return self._device_type", "def device_type(self):\n return self._device_type", "def get_device_type(self):\r\n # int32 __CFUNC DAQmxGetDevProductType(const char device[], char *data,\r\n # uInt32 bufferSize)\r\n bufsize = 1024\r\n buf = ctypes.create_string_buffer(bufsize)\r\n NIDAQ_dll.DAQmxGetDevProductType(\r\n self.dev_id.encode('ascii'), ctypes.byref(buf), bufsize)\r\n return buf_to_list(buf)[0]", "def type(self):\n return self._device.type_name", "def device_class(self):\n return SENSOR_TYPES[self.sensor][3].get(\"device_class\")", "def type(self) -> str:\n return self._device_info[\"Type\"]", "def device_type(self) -> str:\n if self.android_feature_phone():\n return 'smartphone'\n\n dt = self.all_details.get('device', {}).get('type', '')\n if dt:\n return dt\n\n aat = self.android_device_type()\n if aat:\n return aat\n\n if self.windows_tablet():\n return 'tablet'\n\n if self.is_television():\n return 'tv'\n\n if self.is_desktop():\n return 'desktop'\n\n if self.opera_tablet():\n return 'tablet'\n\n return ''", "def device_type(devices):\n num_of_types = len({type(device) for device in devices})\n if num_of_types == 1:\n return devices[0].list_type.replace(\"_\", \" \").title()\n elif num_of_types == 0:\n return None\n else:\n raise ValueError", "def device_class(self):\n return BINARY_SENSORS[self.info_type][\"device_class\"]", "def device_class(self) -> str | None:\n return self._get_sensor_type()[2]", "def device_type(self) -> str:\n return self.profile_device.device_type", "def __get_device_type_name(self, mps_db_session, device_type_id):\n device_type = mps_db_session.query(models.DeviceType).filter(models.DeviceType.id==device_type_id).all()\n\n if len(device_type) == 1:\n return device_type[0].name\n elif len(device_type) == 0:\n raise ValueError(\"Function \\\"__get_device_type_name(device_type_id={}). Not fault was found.\\\"\"\n .format(device_type_id))\n else:\n raise ValueError(\"Function \\\"__get_device_type_name(device_type_id={}). More than one device matches.\\\"\"\n .format(device_type_id))", "def device_type(self):\r\n return self._arm.device_type", "def device_class(self):\n if self._type in SENSOR_TYPES:\n return self._type\n return None", "def device_class(self):\n return SENSOR_TYPES[self._type][3] if self._type in SENSOR_TYPES else None", "def device_class(self):\n return self.sensor_type[\"class\"]", "def recv_type(self, type_):\n msg = self.recv()\n assert msg and msg['type'] == type_, msg\n return msg", "def device_class(self):\n return self.type", "def device_class(self):\n return self.type", "def device_class(self):\n return self.type", "def get_sensor_type(self):\n return self.data[1][:-1]", "def device_class(self):\n return self._device_type", "def model(self):\n return self.device.settings[\"device\"][\"type\"]", "def model(self):\n return self.device.settings[\"device\"][\"type\"]", "def device_class(self):\n return SENSOR_TYPES[self._type][1]", "def device_class(self):\n return SENSOR_TYPES[self._type][1]", "def pkt_type(self):\n return uint16_packer.unpack(self[32:34])[0]" ]
[ "0.625213", "0.625213", "0.62468296", "0.61289835", "0.5936063", "0.588411", "0.58595324", "0.5773678", "0.57696486", "0.5759056", "0.57496417", "0.57362485", "0.57100964", "0.5695006", "0.5679305", "0.56452966", "0.5604981", "0.5602389", "0.5600383", "0.5584659", "0.5575527", "0.5575527", "0.5575527", "0.55733967", "0.5545919", "0.5533343", "0.5533343", "0.55166095", "0.55166095", "0.5511383" ]
0.6615226
0
Return the current position of the shutter 0 <= pos <= 100.
def get_shutter_position(self) -> int:
    hex_pos = hexlify(self.message[135:137]).decode()
    return int(hex_pos[2:4]) + int(hex_pos[0:2], 16)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_position(self, position):", "def pos(self):\n return (self.raw - self.raw_zero) / self.ratio", "def get_pos(self):\r\n return self.pos", "def getTilePos(self, pos = None):\n\n if not pos:\n pos = self.actor.getPos()\n \n for i in range(len(pos)):\n pos[i] = int(math.floor( (pos[i] + self.dimensions[i]) / 2.0))\n #pos[i] = int(math.floor( pos[i] / 2.0))\n\n return pos", "def _get_pos(self):\n return self._pos", "def get_pos(self):\n return self.pos", "def getPosition(self):\n return self.x", "def current_cover_position(self):\n state = self.channel_data.get(\"state\")\n if state:\n return 100 - state[\"shut\"]\n return None", "def __calcPos(self, x):\n pos = (x - self.slider_x)/(self.canv_W-2*self.slider_x)\n if pos<0:\n return 0\n elif pos>1:\n return 1\n else:\n return pos", "def getPos(self):\n return self.__current_pos", "def pos(self, value):\n self.uw.send('%s.val = %.4f smooth:2' % (self.name, (self.clipPosLimits(value) * self.ratio) + self.raw_zero))", "def get_base_pos_on_screen(self,position):\n\n return self.seq_xstart+float(position-1)*self.base_scale.get(),self.seq_row", "def get_position(self):\n position = (self.position_x * SPRITE_SIZE, self.position_y * SPRITE_SIZE)\n return position", "def position(self):\n if self.p:\n if self._finished:\n return None\n return self.p.get_position()*10", "def aa_pos(self, pos):\n return self.nt_pos(pos) // 3", "def x(self):\r\n return self.position.x", "def get_pos_in_pixels(self):\n pixelpos = Vector(self.pos.x * 32, -self.pos.y * 32)\n return pixelpos + self.offset", "def convert_to_pygame(pos):\n return int(pos.x), int(-pos.y+600)", "def pos(self):\n return self._pos", "def pos(self):\n return self._pos", "def current_cover_position(self):\n return self._device.level * 100.0", "def pos(self):\n return self.info['value']", "def Drag(self, mouse_pos):\r\n point_pos = mouse_pos[0]\r\n if mouse_pos[0] < self.pos[0]: point_pos = self.pos[0]\r\n if mouse_pos[0] > self.pos[0] + self.barsize : point_pos = self.pos[0] + self.barsize\r\n num = int((point_pos - self.pos[0])/(self.barsize/self.range))\r\n self.pointrect = pygame.Rect((point_pos, self.pos[1] - 15, 40, 40))\r\n return num", "def screen_coordinates(pos):\n\n return [int((pos[0] % screen_width) / px), screen_height - int((pos[1] % screen_height) / px)]", "def get_score_pos(pos, board):\n if piece_on_pos(pos, board):\n return board[pos[0]][pos[1]].points\n else:\n return 0", "def get_pos(self) -> tuple:\n return self.pos", "def position(self):\n return self._position", "def wheel(pos):\n if pos < 85:\n return Color(255, 0, 0)\n elif pos < 170:\n pos -= 85\n return Color(255, 0, 255)\n #return Color(0, 255, 255 - pos * 3)\n else:\n pos -= 170\n return Color(255, 0, 0)\n #return Color(0, 255, 27)", "def grab_point(self, pos):\n self.move_cartesian_frame_linear_interpolation(tfx.pose(np.array(pos), np.array(self.GRAB_ORIENTATION)), 0.1)\n self.grab_current_point()", "def distinter(self,pos):\n\t\tdist = 0\n\t\taux = self.posbase\n\t\twhile not self.eob():\n\t\t\tif self.checkintercambio(pos):\n\t\t\t\tdist = self.posbase - aux\n\t\t\tself.posbase +=1\n\t\tself.posbase = aux\n\t\treturn dist" ]
[ "0.62909096", "0.61817384", "0.6158584", "0.61115795", "0.60676634", "0.60562605", "0.6010754", "0.5980439", "0.59123677", "0.58966935", "0.5874537", "0.5850645", "0.5845945", "0.58204806", "0.5793631", "0.5792949", "0.573503", "0.5716095", "0.56889486", "0.56889486", "0.5688929", "0.56784123", "0.5678216", "0.5674803", "0.56656253", "0.56567776", "0.5654121", "0.56455976", "0.5641794", "0.5641746" ]
0.62485445
1
Return the current direction of the shutter (UP/DOWN/STOP).
def get_shutter_direction(self) -> ShutterDirection:
    hex_direction = hexlify(self.message[137:139]).decode()
    directions = dict(map(lambda d: (d.value, d), ShutterDirection))
    return directions[hex_direction]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def current_direction(self):\n return self.wink.current_fan_direction()", "def get_direction(self) -> int: \r\n if time.time() > self.stop_timer:\r\n return Directions.stop\r\n else:\r\n return self.direction", "def current_direction(self) -> str:\n if self._device.fan_dir == SENSEME_DIRECTION_FORWARD:\n return DIRECTION_FORWARD\n return DIRECTION_REVERSE", "def direction(self):\r\n return 180 - atan2(self.x, self.y)*180/pi", "def get_direction(self):\r\n return self.__direction", "def get_direction(self):\n return self.direction", "def direction(self):\n return(copysign(1, self.volume))", "def wind_direction(self):\n return self.flow_field.wind_direction", "def get_mount_direction(self):\r\n return self._studio.get_mount_direction()", "def find_direction(self):\n\t\tif self.direction == OUTPUT.MOTOR_UP:\n\t\t\tfor floor in xrange(self.currentFloor+1, config.NUM_FLOORS):\n\t\t\t if self.orderQueue.has_order_in_floor(floor):\n\t\t\t\t\treturn OUTPUT.MOTOR_UP\n\t\t\treturn OUTPUT.MOTOR_DOWN\n\t\telse:\n\t\t\tfor floor in xrange(self.currentFloor-1, -1, -1):\n\t\t\t\tif self.orderQueue.has_order_in_floor(floor):\n\t\t\t\t\treturn OUTPUT.MOTOR_DOWN\n\t\t\treturn OUTPUT.MOTOR_UP\n\t\treturn OUTPUT.MOTOR_UP", "def wind_direction(self):\n names = ['anc_wind_direction']\n return self.sensor.get_with_fallback('wind_direction', names)", "def get_direction(self):\n\n return -1 if self.curr_player == self.PLAYER1 else 1", "def read_direction(self):\n global motor_direction\n with self._lock:\n return motor_direction", "def getDirection(self):\n return self.ray.direction", "def direction(self):\n if self._is_hit:\n return Direction.NOT_MOVING\n return self._dir", "def direction(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"direction\")", "def get_arm_direction(self):\n return 1", "def get_direction(self):\n return self.actual_coordinates[2]", "def wind_bearing(self) -> float:\r\n return self._first_timeserie[\"data\"][\"instant\"][\"details\"][\r\n \"wind_from_direction\"\r\n ]", "def bullet_direction(self) -> Direction:\n # Randomly get a direction\n if self.get_random_direction():\n direction = Direction.UP\n\n else:\n direction = Direction.DOWN\n\n return direction", "def getDirection(self):\n return self.listener.direction", "def direction(self) -> int:\n return self._direction", "def direction(self):\n return atan2d(self.y, self.x)", "def direction(self) -> str:\n return pulumi.get(self, \"direction\")", "def gravity_direction(self):\r\n return self._arm.gravity_direction", "def current_direction(self):\n return self._attributes.get(\"current_direction\")", "def direction(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"direction\")", "def direction(self):\n return self.cfg.direction", "def direction(self):\n g = self._grad_f(self._x, *self._args)\n self._calls[1] += 1\n if self._prev_dx is None:\n dx = -g\n else:\n b = max(0, np.dot(g, g - self._prev_g) / np.sum(self._prev_g ** 2))\n dx = -g + b * self._prev_dx\n if np.dot(dx, g) > 0:\n dx = -g\n self._prev_g = g\n self._prev_dx = dx\n return np.nan_to_num(dx)", "def get_direction(pi_values):\n if pi_values == (0, 0, 0, 0):\n return 'do not move'\n pi_right, pi_up, pi_left, pi_down = pi_values\n pi_sum = sum((pi_right, pi_up, pi_left, pi_down))\n\n p_right = pi_right / pi_sum\n p_up = pi_up / pi_sum\n p_left = pi_left / pi_sum\n p_down = pi_down / pi_sum\n\n return np.random.choice(\n ('right', 'up', 'left', 'down'),\n p=[p_right, p_up, p_left, p_down])" ]
[ "0.7377894", "0.7003519", "0.6926874", "0.6845885", "0.684002", "0.6758815", "0.6755902", "0.6728268", "0.66687226", "0.6613132", "0.6575908", "0.65501356", "0.65354353", "0.6532375", "0.65267795", "0.650473", "0.6488082", "0.64337397", "0.6433097", "0.64316136", "0.64236265", "0.640169", "0.6400526", "0.63985574", "0.636132", "0.6325913", "0.63143796", "0.6254806", "0.6216719", "0.6204871" ]
0.7295307
1
Return the current thermostat state.
def get_thermostat_state(self) -> DeviceState:
    hex_power = hexlify(self.message[137:138]).decode()
    return DeviceState.ON if hex_power == DeviceState.ON.value else DeviceState.OFF
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_state(self):\n return self.env.sim.get_state()", "def get_current_state(self):\n return self._current_state", "def state(self):\n return self._device.temperature", "def get_state(self):\n return self.controller.get_state()", "def get_current_state(self):\n return self.world.get_state()", "def state(self):\n return self.device.status(station=self.station_number)", "def get_state(self):\n if self.state:\n return self.state\n\n from timon.state import TMonState\n self.state = state = TMonState(self.cfg['statefile'], config=self)\n return state", "def get_state(self):\n return self.wm.state if self.wm else None", "def get_current_state(self):\n return self.game.get_current_state()", "def get_state(self):\r\n alarm = self._alarm()\r\n return alarm.state", "def get_trace_state(self):\n return self.__sensor_states[4]", "def get_state(self):\n return self._env.get_state()", "def thermostat(self) -> dict[str, Any]:\n return self.data.ecobee.get_thermostat(self.thermostat_index)", "def get_state(self):\n return self.state", "def get_state(self):\n return self.state", "def state(self):\n return self.device.device_data[self.device_id]['temperature']", "def state(self):\n return self.get_state()", "def GetState(self):\r\n \r\n return self.state", "def get_temperature_state(self):\n return self.__sensor_states[0]", "def _get_state(self):\n fw_wp_en = (self._interface.get('fw_wp_en') == 'on')\n fw_wp = (self._interface.get('fw_wp') == 'on')\n if fw_wp_en:\n return self._STATE_FORCE_ON if fw_wp else self._STATE_FORCE_OFF\n else:\n return self._STATE_ON if fw_wp else self._STATE_OFF", "def get_state(self):\n return self._state", "def get_state(self):\n return self._state", "def get_state(self):\n return self._state", "def get_state(self):\n return self._state", "def get_state(self):\n return self._state", "def state(self):\n state = 'Unknown'\n closest = 0.5\n for device, method_name in self._methods:\n if method_name.startswith('wm_'):\n state_name = method_name.replace('wm_', '', 1)\n wm_state = getattr(device, method_name)\n diff = wm_state()\n if diff < closest:\n state = state_name\n closest = diff\n return state", "def current_state(self):\n LOGGER.debug('Getting current_state: %s', self._current_state)\n return self._current_state", "def current_state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"current_state\")", "def state(self):\n return self.device.value()", "def getState(self) :\n return self.state" ]
[ "0.72841996", "0.71483696", "0.7144572", "0.71216285", "0.7120051", "0.7117457", "0.7110498", "0.7099462", "0.70663476", "0.7061033", "0.7054098", "0.70529044", "0.70514995", "0.7017588", "0.7017588", "0.69712144", "0.6947047", "0.6907573", "0.69067836", "0.6891913", "0.68785274", "0.68785274", "0.68785274", "0.68785274", "0.68785274", "0.6832695", "0.68052685", "0.67736053", "0.6754577", "0.67395216" ]
0.74006474
0
Return the current thermostat mode.
def get_thermostat_mode(self) -> ThermostatMode:
    hex_mode = hexlify(self.message[138:139]).decode()
    states = dict(map(lambda s: (s.value, s), ThermostatMode))
    return ThermostatMode.COOL if hex_mode not in states else states[hex_mode]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_mode(self):\r\n return self._api.get_mode()", "def getmode(self):\n return self.mode", "def mode(self) -> str:\n return pulumi.get(self, \"mode\")", "def mode(self) -> str:\n return pulumi.get(self, \"mode\")", "def get_mode(self):\r\n return self.mode", "def currentMode(self):\n logger.debug(\"Func: currentMode/getter\")\n\n return self._currentsDict[\"currentMode\"]", "def getMode(self):\n return self._mode", "def get_mode(self) -> str:\n\n return self.send(self.cmd.GET_MODE)", "def mode(self) -> Optional[str]:\n return pulumi.get(self, \"mode\")", "def mode(self) -> Optional[str]:\n return pulumi.get(self, \"mode\")", "def mode(self):\n return self._lift(\"mode\")", "def mode(self):\r\n return self._mode", "def mode(self):\r\n return self._mode", "def mode(self):\r\n return self._mode", "def mode(self):\n return self._data.get('mode', None)", "def mode(self):\n return self._mode", "def mode(self):\n return self._mode", "def mode(self):\n return self._mode", "def get_mode(self, ):\n return self.get_parameter('mode')", "def mode(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"mode\")", "def getMode(self):\n with self.lock:\n mode = self.mode\n return mode", "def mode(self):\n\n return self._mode", "def mode(self):\n return self.__mode", "def mode(self) -> Optional[str]:\n for mode in self._modes:\n if mode.active:\n return mode.name\n return None", "def mode(self) -> Mode:\n return self._mode", "def get_current_mode(self):\n return self.read(0xa2)", "def current_fan_mode(self):\n fan_speed = self.data.get(\"windspeed\")\n if fan_speed is None:\n return None\n if fan_speed == \"1\":\n return \"low\"\n elif fan_speed == \"2\":\n return \"medium\"\n elif fan_speed == \"3\":\n return \"high\"\n return fan_speed", "def mode(self) -> str:\r\n return self._mode", "def mode(self):\n if self._vsanobj.id is None:\n raise VsanNotPresent(\"Vsan \" + str(self._vsanobj._id) + \" is not present on the switch.\")\n out = self.__show_zone_status()\n return out[get_key(zonekeys.MODE, self._SW_VER)]", "def mode(self):\n return self._mode_func" ]
[ "0.7658945", "0.7552529", "0.7548578", "0.7548578", "0.7477973", "0.73740226", "0.7363553", "0.7352882", "0.7321897", "0.7321897", "0.72539693", "0.7151159", "0.7151159", "0.7151159", "0.7148779", "0.7146024", "0.7146024", "0.7146024", "0.7132851", "0.7081131", "0.70633066", "0.70606446", "0.7035483", "0.7001642", "0.7000375", "0.6976099", "0.69750017", "0.6963167", "0.68972224", "0.6887704" ]
0.7637281
1
Return the current thermostat fan level.
def get_thermostat_fan_level(self) -> ThermostatFanLevel:
    hex_level = hexlify(self.message[140:141]).decode()
    states = dict(map(lambda s: (s.value, s), ThermostatFanLevel))
    return states[hex_level[0:1]]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def current_fan_mode(self):\n fan_speed = self.data.get(\"windspeed\")\n if fan_speed is None:\n return None\n if fan_speed == \"1\":\n return \"low\"\n elif fan_speed == \"2\":\n return \"medium\"\n elif fan_speed == \"3\":\n return \"high\"\n return fan_speed", "def get_fan_state(self):\n return self.__sensor_states[1]", "def current_fan_mode(self):\n return self._current_fan_mode", "def fan_mode(self):\n if self.ac.status is None:\n _LOGGER.debug(f\"fan_mode: status is None, returning None\")\n return None\n if self.ac.status.is_on:\n fan_speed = self.ac.status.fan_speed\n value = self.FAN_MODE_MAPPING[fan_speed]\n _LOGGER.debug(f\"fan_mode: returning {value} (derived from {fan_speed})\")\n return value\n else:\n _LOGGER.debug(f\"fan_mode: returning FAN_OFF - device is off\")\n return FAN_OFF", "def get_fan_mode(self):\n return self.__fan_mode", "def get_level(self, channel=None):\n return int(self.getSensorData(\"FILLING_LEVEL\", channel))", "def fan_mode(self):\n return self._fan_mode", "def fan_mode(self):\n return self._fan_mode", "def get_antenna_level(self):\n response = self.parent.rfid.get_antenna_level()\n response = response[0]\n return response", "def fan_mode(self):\n return self.fan_mode_index", "def _do_get_level(self):\n logging.info(__name__ + ' : Read level of channel 1')\n result = self._execute('R1')\n return float(result.replace(\"R\", \"\")) / 10", "def fan_mode(self) -> str | None:\n return self._current_fan_mode", "def current_fan_mode(self):\n if self._device.fan == self._device.FAN_AUTO:\n return STATE_AUTO\n elif self._device.fan == self._device.FAN_ON:\n return STATE_ON\n return STATE_UNKNOWN", "def get_battery_level(self) -> int:\n\n try:\n self._serial.transmit(b'\\x51\\x00')\n response = self._get_reply(0x51, 1, 0.25)\n finally:\n self._gpio.sleep()\n\n return response[2]", "def get_level(self) -> int:\n return self.rstate.level()", "def get_fan_speed(self):\n return self.__fan_speed", "def fan_mode(self) -> str | None:\n if self.vera_device.get_fan_mode() == \"ContinuousOn\":\n return FAN_ON\n return FAN_AUTO", "def fan_mode(self) -> int | None:\n return self.cluster.get(\"fan_mode\")", "def fan_mode(self):\n if self._client.fan == self._client.FAN_ON:\n return FAN_ON\n return FAN_AUTO", "def tower_status_radiant(self):\n return self._get(\"tower_status_radiant\")", "def battery_level(self):\n return self.battery", "def current_direction(self):\n return self.wink.current_fan_direction()", "def get_water_level(self):\n return self.water_level", "def get_fan_mode(self):\n return self.parent._fan_auto_mode", "def fan_mode(self) -> str:\n fan_mode = self._node.aux_properties.get(CMD_CLIMATE_FAN_SETTING)\n if not fan_mode:\n return FAN_OFF\n return UOM_TO_STATES[UOM_FAN_MODES].get(fan_mode.value, FAN_OFF)", "def get_lux(self):\n\n svc = \"urn:micasaverde-com:serviceId:LightSensor1\"\n if not svc in self.services:\n raise RuntimeError, \"Device doesn't support the service\"\n\n return self.get_variable(svc, \"CurrentLevel\")", "def get_fan_speed(self):\n response = self.parent.fancoolers.get_speed()\n if response is not None:\n response = response[0]\n return response", "def level(self):\n return self.__pin.pwm", "def get_fan(self):\n return _fan", "def getLightSensor() -> int:\n pass" ]
[ "0.74703616", "0.7066224", "0.68729824", "0.68550014", "0.67621636", "0.6755983", "0.67260545", "0.67260545", "0.67141235", "0.66230714", "0.65797645", "0.6569751", "0.64834934", "0.6479434", "0.64644015", "0.64276075", "0.6419574", "0.63961965", "0.63904285", "0.6337175", "0.63295203", "0.63199514", "0.6311229", "0.6308742", "0.62616515", "0.6253802", "0.6226748", "0.6220427", "0.62179214", "0.6185005" ]
0.7609413
0
Return the current thermostat fan swing.
def get_thermostat_swing(self) -> ThermostatSwing:
    hex_swing = hexlify(self.message[140:141]).decode()
    return (
        ThermostatSwing.OFF
        if hex_swing[1:2] == ThermostatSwing.OFF.value
        else ThermostatSwing.ON
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def current_fan_mode(self):\n fan_speed = self.data.get(\"windspeed\")\n if fan_speed is None:\n return None\n if fan_speed == \"1\":\n return \"low\"\n elif fan_speed == \"2\":\n return \"medium\"\n elif fan_speed == \"3\":\n return \"high\"\n return fan_speed", "def get_fan_state(self):\n return self.__sensor_states[1]", "def current_fan_mode(self):\n return self._current_fan_mode", "def fan_mode(self) -> str | None:\n return self._current_fan_mode", "def fan_mode(self):\n return self._fan_mode", "def fan_mode(self):\n return self._fan_mode", "def fan_mode(self):\n return self.fan_mode_index", "def speed(self) -> str:\n current_wink_speed = self.wink.current_fan_speed()\n if SPEED_AUTO == current_wink_speed:\n return SPEED_AUTO\n if SPEED_LOWEST == current_wink_speed:\n return SPEED_LOWEST\n if SPEED_LOW == current_wink_speed:\n return SPEED_LOW\n if SPEED_MEDIUM == current_wink_speed:\n return SPEED_MEDIUM\n if SPEED_HIGH == current_wink_speed:\n return SPEED_HIGH\n return None", "def get_fan_mode(self):\n return self.__fan_mode", "def fan_mode(self):\n if self.ac.status is None:\n _LOGGER.debug(f\"fan_mode: status is None, returning None\")\n return None\n if self.ac.status.is_on:\n fan_speed = self.ac.status.fan_speed\n value = self.FAN_MODE_MAPPING[fan_speed]\n _LOGGER.debug(f\"fan_mode: returning {value} (derived from {fan_speed})\")\n return value\n else:\n _LOGGER.debug(f\"fan_mode: returning FAN_OFF - device is off\")\n return FAN_OFF", "def current_power_w(self):\n if self._devtype == \"pod\":\n return self._current_consumption\n return False", "def get_fan_speed(self):\n response = self.parent.fancoolers.get_speed()\n if response is not None:\n response = response[0]\n return response", "def current_fan_mode(self):\n if self._device.fan == self._device.FAN_AUTO:\n return STATE_AUTO\n elif self._device.fan == self._device.FAN_ON:\n return STATE_ON\n return STATE_UNKNOWN", "def fan_mode(self) -> str | None:\n if self.vera_device.get_fan_mode() == \"ContinuousOn\":\n return FAN_ON\n return FAN_AUTO", "def fan_mode(self) -> str | None:\n state = self._state\n return state.custom_fan_mode or _FAN_MODES.from_esphome(state.fan_mode)", "def fan_mode(self):\n if self._client.fan == self._client.FAN_ON:\n return FAN_ON\n return FAN_AUTO", "def fan_mode(self) -> int | None:\n return self.cluster.get(\"fan_mode\")", "def wing(self):\n return", "def swing_mode(self) -> str | None:\n return _SWING_MODES.from_esphome(self._state.swing_mode)", "def get_fan(self):\n return _fan", "def get_fan_speed(self):\n return self.__fan_speed", "def get_fan_mode(self):\n return self.parent._fan_auto_mode", "def swing_mode(self) -> str | None:\n return self._current_swing_mode", "def current_direction(self):\n return self.wink.current_fan_direction()", "def get_thermostat_fan_level(self) -> ThermostatFanLevel:\n hex_level = hexlify(self.message[140:141]).decode()\n states = dict(map(lambda s: (s.value, s), ThermostatFanLevel))\n return states[hex_level[0:1]]", "def exposedSurf(self):\n if self.precision:\n h = self.evaluations.exposedWing.edges[1].point1.x # height of trapezoid\n B = self.chordRootW # major base of trapezoid\n b = self.evaluations.chordIntersected.edges[1].length # minor base of trapezoid\n internalS = 2 * (0.5 * (b + B) * h) # wing surface internal at fuselage\n return self.surfaceW - internalS\n else:\n return self.surfaceW - self.fuselageDiameter * self.cMACW # first guess for a faster evaluation", "def _get_wred(self):\n return self.__wred", "def _get_wred(self):\n return 
self.__wred", "def _get_wred(self):\n return self.__wred", "def current_swing_mode(self):\n return self._current_swing_mode" ]
[ "0.70957595", "0.6493973", "0.631704", "0.63008916", "0.6264081", "0.6264081", "0.6234834", "0.62224776", "0.6185605", "0.6077536", "0.604315", "0.6022031", "0.6020563", "0.6006388", "0.5999204", "0.5998517", "0.59481037", "0.59452474", "0.59378165", "0.592065", "0.59011304", "0.5897364", "0.5879388", "0.5875109", "0.5855873", "0.58484167", "0.5835872", "0.5835872", "0.5835872", "0.582918" ]
0.68038505
1
Find a node marked with the given pattern, "targetPattern", in a DOM object, "someStan"
def findPattern(someStan, targetPattern):
    pat = getattr(someStan, 'pattern', None)
    if pat == targetPattern:
        return someStan.cloneNode()
    for child in getattr(someStan, 'children', []):
        result = findPattern(child, targetPattern)
        if result is not None:
            return result.cloneNode()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_target(self, target):\n components = target.split(':', maxsplit=1)\n if len(components) == 1:\n namespace = ''\n target_name = components[0]\n else:\n namespace = components[0]\n target_name = components[1]\n\n logging.info(f\"Finding target with namespace '{namespace}', name '{target_name}'\")\n for target in self.targets:\n if target.name == target_name and target.namespace==namespace:\n return target", "def node_by_pattern(self, pattern: str):\n if self.scope != '':\n if self.scope[-1] == '/':\n pattern = self.scope + pattern\n else:\n pattern = self.scope + '/' + pattern\n found_names = find_object_by_pattern(self._matched_nodes_names, pattern)\n if len(found_names) > 1:\n raise Error('The amount of nodes matched pattern \"{}\" is more than 1. '.format(pattern) +\n refer_to_faq_msg(78))\n if len(found_names) == 0:\n return None\n return Node(self.graph, found_names[0])", "def search(self, target):\n if DEBUG: print('search({})'.format(target))\n\n result = False\n\n cur = self.head\n \n output = \"\\tPath: \"\n \n while cur:\n output += \"{}\".format(cur.val)\n if not cur.next and not cur.below:\n output += \" END\"\n break\n elif cur.next == None or\\\n target < cur.next.val:\n cur = cur.below\n output += \" v \"\n elif cur.next.val == target:\n result = True\n output += \" -> {}! FOUND\".format(target)\n break\n elif target > cur.next.val:\n output += \" -> \"\n cur = cur.next\n else:\n print(\"\\thow did i get here\")\n\n if DEBUG: print(output)\n if DEBUG: print('\\t{}'.format(result))\n return result", "def parse(self, target, pattern):\n indexset = IndexSet(target)\n return self.read(indexset, pattern, 0)", "def search(self, pattern):\n result = None\n for node, data in self.traverse():\n if pattern in data['meta']:\n return node, data\n return result", "def _find_in_xml(self, pattern, element=None, namespace=Xmlns_path):\n el = self._xml if element is None else element\n return el.find('.//' + namespace + pattern)", "def match(pattern, target):\n pattern = ''.join('.*' if c == '*' else re.escape(c) for c in pattern)\n return bool(re.match('^' + pattern + '$', target))", "def findNode(self, target: hash.hash.Hash):\n for bucket in self.buckets:\n if bucket.inRange(nodeID):\n for node in bucket:\n if node.hash == target:\n return node\n \n return None\n return None", "def search(self, pattern):\n raise NotImplementedError()", "def search(line, pattern_tree, pattern_path, result_tree, result_path):\n node = (node for node in pattern_path[:])\n pattern_path[:] = [] # Start search at root\n while not search_down(line, pattern_tree, pattern_path, result_tree, result_path):\n try:\n pattern_path.append(node.next())\n except StopIteration:\n break", "def find_pattern_in_str(pattern, source):\n pattern = re.compile(pattern)\n for match in re.finditer(pattern,source):\n return match.groups()\n return None", "def _FindTarget(self):\n if nobibparser:\n return self._FindBibEntriesRegex()\n else:\n return self._FindBibEntriesParser()", "def find_node(self, type):\n pass", "def matches(self, target):\n raise NotImplementedError()", "def node_find_by_name( fdt, node_name, starting_node = 0, multi_match=False ):\n\n matching_nodes = []\n matching_node = None\n\n search_active = False\n if starting_node == \"/\" or starting_node == 0:\n search_active = True\n\n for node in fdt.node_iter():\n if not search_active:\n if node.path == starting_node:\n search_active = True\n\n if search_active:\n if node.name == node_name:\n if not matching_nodes:\n matching_node = node\n 
matching_nodes.append( node )\n\n return matching_node, matching_nodes", "def SearchRePy20(context, pattern, arg=None):\n if not arg:\n arg = context.node\n arg = Conversions.StringValue(arg)\n proc = context.processor\n matches_nodeset = []\n _re =re.compile(pattern)\n _match =_re.search(arg)\n while _match:\n proc.pushResult()\n proc.writers[-1].startElement('Match', EMPTY_NAMESPACE)\n _groups =_match.groups()\n # .groups() return empty tuple when the pattern did not do grouping\n if not _groups: _groups =tuple(_match.group())\n for group in _groups:\n proc.writers[-1].startElement('Group', EMPTY_NAMESPACE)\n # MatchObject groups return None if unmatched\n # unlike .findall() returning empty strings\n proc.writers[-1].text(group or '')\n proc.writers[-1].endElement('Group')\n proc.writers[-1].endElement('Match')\n frag = proc.popResult()\n context.rtfs.append(frag)\n matches_nodeset.append(frag.childNodes[0])\n _match =_re.search(arg, _match.end())\n return matches_nodeset", "def findTag(self, query):\n\t\ttry:\n\t\t\tassert(type(query)) == str or Pattern\n\t\t\treturn self.driver.find_element_by_tag_name(query)\n\t\texcept Exception as e:\n\t\t\tprint(\"Could not find ID: {}\\n\\n{}\".format(query, e))\n\t\t\treturn -1", "def search_nodes_by_pattern(self, pattern):\n searched_nodes = []\n if pattern and pattern != '/':\n pattern = pattern.lower()\n for name, node in self._normal_node_map.items():\n name = name.lower()\n pattern_index = name.rfind(pattern)\n if pattern_index >= 0 and name.find('/', pattern_index + len(pattern)) == -1:\n searched_nodes.append(node)\n return searched_nodes", "def WhereMatches(self, pattern):\n regex = re.compile(match_util.ExpandRegexIdentifierPlaceholder(pattern))\n return self.Filter(lambda s: (\n regex.search(s.source_path) or\n regex.search(s.object_path) or\n regex.search(s.full_name) or\n s.full_name is not s.template_name and regex.search(s.template_name) or\n s.full_name is not s.name and regex.search(s.name)))", "def match_node_id(self, id_, match):\n pass", "def search_summary(self, target, name=\"*summary.txt\"):\n summary_found = glob.glob(os.path.join(self.path, name))\n if summary_found:\n summary_name = summary_found[0]\n if os.path.exists(summary_name):\n with open(summary_name, \"r\") as summary_file:\n for line in summary_file:\n if target in line:\n return line[:-1]\n return None", "def match_any_node_id(self, match):\n pass", "def depth_first_search(self, target: Dict) -> Optional[Node]:\n\n def search(current_node: Node):\n flag = True\n for k, v in target.items():\n flag = flag and getattr(current_node, k) == v\n if not flag:\n break\n if flag:\n return current_node\n for child in current_node.children:\n ret = search(child)\n if ret:\n return ret\n return search(self.root_node)", "def match(tgt, opts=None, minion_id=None):\n if not opts:\n opts = __opts__\n if not minion_id:\n minion_id = opts.get(\"id\")\n\n return bool(re.match(tgt, minion_id))", "def find_substring(pattern, target):\n # Eliminate trivial cases.\n n = len(target)\n m = len(pattern)\n if (not n or not m or m > n):\n return False\n #\n # Search by comparing hashes.\n pattern_hash = hash(pattern)\n for string_start in range(n - m + 1):\n string_end = string_start + m\n if pattern_hash == hash(target[string_start:string_end]):\n return True\n return False", "def matches(self, target):\n return fnmatch(str(target).lower(), self._pattern.lower())", "def _find_pattern(self, locator):\n assert locator is not None and len(locator) > 0\n locator = locator.strip().lower()\n 
(pattern, sensitivity) = self._parse_locator(locator)\n\n if (sensitivity != None):\n sensitivity = float(sensitivity)\n pattern = Pattern(pattern).similar(sensitivity)\n else:\n pattern = pattern\n return pattern", "def find(self, **opts):\n return self.parser.find(search_inside=self, **opts)", "def find_node(self, value):\n for (fun, node) in self.__root.__fast_find:\n if fun(value):\n return node\n return None", "def find(self, p):\n pass" ]
[ "0.5607359", "0.55932343", "0.5582585", "0.5578958", "0.55475706", "0.54207045", "0.5325378", "0.52941066", "0.5246212", "0.5242798", "0.5218119", "0.5108456", "0.5108017", "0.5065287", "0.5042875", "0.5022458", "0.5019698", "0.5003935", "0.4971588", "0.49277413", "0.492709", "0.49096534", "0.48680365", "0.48549688", "0.48404858", "0.48374325", "0.4834395", "0.4832018", "0.48140183", "0.48130816" ]
0.77453876
0
Call this to register a converter for one of your custom types.
def registerConverter(convertType, converter):
    converters[convertType] = converter
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def register_converter(self, converter, conv_type, conv_format=None):\n self.flask_plugin.register_converter(converter, conv_type, conv_format)", "def addCustomConverter(self, converter):\r\n verifyClass(ICustomROSConverter, converter)\r\n\r\n if converter.MESSAGE_TYPE in self._customTypes:\r\n raise InternalError('There are multiple Converters given for '\r\n 'message type \"{0}\".'.format(\r\n converter.MESSAGE_TYPE))\r\n\r\n try:\r\n pkg, name = package_resource_name(converter.MESSAGE_TYPE)\r\n except ValueError:\r\n raise InternalError('msg type is not valid. Has to be of the from '\r\n 'pkg/msg, i.e. std_msgs/Int8.')\r\n\r\n self._customTypes[converter.MESSAGE_TYPE] = (converter,\r\n self._loader.loadMsg(pkg, name))", "def register_converter(self, converter, name=None):\n if not name:\n name = converter.__name__\n if \"Converter\" in name:\n name = converter.__name__.replace(\"Converter\", \"\")\n self.url_map.converters[name] = converter", "def addConverter(self, *args):\n return _libsbml.SBMLConverterRegistry_addConverter(self, *args)", "def add_convertor(metadata):\n\n metadata[\"convertor\"] = make_convertor(metadata[\"name\"],\n metadata[\"datatype\"])", "def register_converters(self, converter_list):\n for converter in converter_list:\n self.register_converter(converter)", "def register_from_converter(cls, coord_system):\n print(f\"Registering from converter for {coord_system} in {cls.__name__}\")\n def anon_reg_func(callback):\n cls.from_other_conversions[coord_system] = callback\n return callback\n return anon_reg_func", "def register_field(self, field, *args):\n self.ma_plugin.map_to_openapi_type(*args)(field)", "def register_to_converter(cls, coord_system):\n print(f\"Registering to converter for {coord_system} in {cls.__name__}\")\n def anon_reg_func(callback):\n cls.to_other_conversions[coord_system] = callback\n return callback\n return anon_reg_func", "def addCustomResourceType(self, typeName, convertFunc):\n\t\tif typeName in self.customResourceTypeMap:\n\t\t\traise Exception('Custom resource type \"' + typeName + '\" is already registered.')\n\t\tself.customResourceTypeMap[typeName] = convertFunc", "def converter(item):\n pass", "def addModelType(convertContext, typeName, convertFunc):\n\tif not hasattr(convertContext, 'modelTypeMap'):\n\t\tconvertContext.modelTypeMap = dict()\n\n\tif typeName in convertContext.modelTypeMap:\n\t\traise Exception('Model type \"' + typeName + '\" is already registered.')\n\tconvertContext.modelTypeMap[typeName] = convertFunc", "def register_type(type_, serializer_deserializer: Tuple[Callable[[T], str], Callable[[str], T]]):\n global _transform\n if type_ not in _transform:\n _transform[type_] = serializer_deserializer\n _load_types()", "def test_auto_register(self):\n\n class TestConverter(BaseConverter):\n pass\n\n class TestConverterWithMeta(BaseConverter):\n class Meta:\n name = 'test'\n\n self.assertEquals(TestConverter, ConverterRegistry.get('TestConverter'))\n self.assertEquals(TestConverterWithMeta, ConverterRegistry.get('test'))", "def converterBean(self, converter):\n pass", "def add_special_conversion(self, from_units, to_units, converter):\n self.special_conversions[(from_units, to_units)] = converter", "def getConverters(self):\n pass", "def convert_type(self, value, schema_type, **kwargs):", "def convert(self, converter=None):\n if converter is not None:\n self.converter = converter\n self.code(self.prepare)", "def addInstanceDataType(self, typeName, convertFunc):\n\t\tif typeName in self.instanceDataTypeMap:\n\t\t\traise 
Exception('Instance data type \"' + typeName + '\" is already registered.')\n\t\tself.instanceDataTypeMap[typeName] = convertFunc", "def converterValue(self, converter):\n pass", "def getConverterFor(self, *args):\n return _libsbml.SBMLConverterRegistry_getConverterFor(self, *args)", "def converters(self) -> Iterator[Tuple[str, Type[ConverterAPI]]]:", "def newConverter(self, value):\r\n return JsonConverter(value, self)", "def addNodeType(self, typeName, convertFunc):\n\t\tif typeName in self.nodeTypeMap:\n\t\t\traise Exception('Node type \"' + typeName + '\" is already registered.')\n\t\tself.nodeTypeMap[typeName] = convertFunc", "def add_typecheck(self, name: str, callable=None):\n if name in self.__custom_types:\n raise ValueError(\"Type '{}' already exists\".format(name))\n self.__custom_types[name] = callable or self.TYPES[name]", "def register(\n self, name='', magic=(), patterns=(),\n funcwrapper=lambda _:_\n ):\n\n def _decorator(converter):\n if not name:\n raise ValueError('No registration name given')\n if name in self._names:\n raise ValueError(f'Registration name `{name}` already in use for {self._names[name]}')\n if not isinstance(magic, (list, tuple)):\n raise TypeError('Registration parameter `magic` must be list or tuple')\n if not isinstance(patterns, (list, tuple)):\n raise TypeError('Registration parameter `patterns` must be list or tuple')\n converter.format = name\n self._names[name] = converter\n ## magic signatures\n for sequence in magic:\n self._magic.append((Magic(sequence), converter))\n # sort the magic registry long to short to manage conflicts\n self._magic = list(sorted(\n self._magic,\n key=lambda _i:len(_i[0]), reverse=True\n )\n )\n ## glob patterns\n for pattern in (*patterns, f'*.{name}'):\n self._patterns.append((to_pattern(pattern), converter))\n return funcwrapper(converter)\n\n return _decorator", "def _register_builtin_coercers(self):\n type(self).__registry.extend(\n [\n # Check if the annotaion is a date-type\n Coercer(checks.isdatetype, self.cls._coerce_datetime),\n # Check if the annotation maps directly to a builtin-type\n # We use the raw annotation here, not the origin, since we account for\n # subscripted generics later.\n Coercer(\n checks.isbuiltintype, self.cls._coerce_builtin, check_origin=False\n ),\n # Check for a class with a ``from_dict()`` factory\n Coercer(checks.isfromdictclass, self.cls._coerce_from_dict),\n # Enums are iterable and evaluate as a Collection,\n # so we need to short-circuit the next set of checks\n Coercer(checks.isenumtype, self.cls._coerce_enum),\n # Check for a subscripted generic of the ``Mapping`` type\n Coercer(checks.ismappingtype, self.cls._coerce_mapping),\n # Check for a subscripted generic of the ``Collection`` type\n # This *must* come after the check for a ``Mapping`` type\n Coercer(checks.iscollectiontype, self.cls._coerce_collection),\n # Finally, try a generic class coercion.\n Coercer(inspect.isclass, self.cls._coerce_class),\n ]\n )", "def addItemListType(self, typeName, convertFunc):\n\t\tif typeName in self.itemListTypeMap:\n\t\t\traise Exception('Item list type \"' + typeName + '\" is already registered.')\n\t\tself.itemListTypeMap[typeName] = convertFunc", "def converter(self):\r\n return self._converter" ]
[ "0.7982891", "0.7092699", "0.69731474", "0.6862946", "0.65146774", "0.6331026", "0.61889535", "0.6141865", "0.61362225", "0.60911644", "0.59488845", "0.5865866", "0.5849205", "0.5760711", "0.5654728", "0.5608014", "0.5556543", "0.55253804", "0.5516893", "0.54477257", "0.54117346", "0.540516", "0.5377235", "0.53681934", "0.5361304", "0.5336057", "0.53189844", "0.5278067", "0.521066", "0.5210603" ]
0.8012467
0
Recursively apply stuff converters until we get an xml instance.
def convert(request, stuff):
    while not isinstance(stuff, xml):
        convert = converters.get(type(stuff))
        if convert is None:
            raise RuntimeError, "Converter for type %r (%r) not found." % (
                type(stuff), stuff)
        stuff = convert(request, stuff)
    return stuff.string
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def converters(self) -> Iterator[Tuple[str, Type[ConverterAPI]]]:", "def auto_convert(self):\n nodes_converted = []\n for node_type in self.conversion_spec_sheet:\n print('searching for: %s' % node_type)\n found_nodes = self.list_nodes(node_type)\n print('found: %s nodes' % len(found_nodes))\n for node in found_nodes:\n new_node = self.convert(node)\n nodes_converted.append([node, new_node])\n\n return nodes_converted", "def _xml_convert(self, element):\n\n children = list(element)\n\n if len(children) == 0:\n return self._type_convert(element.text)\n else:\n # if the fist child tag is list-item means all children are list-item\n if children[0].tag == \"list-item\":\n data = []\n for child in children:\n data.append(self._xml_convert(child))\n else:\n data = {}\n for child in children:\n data[child.tag] = self._xml_convert(child)\n\n return data", "def xml2obj(self, src):\n\n\t\tclass DataNode(object):\n\t\t\tdef __init__(self):\n\t\t\t\tself._attrs = {} # XML attributes and child elements\n\t\t\t\tself.data = None # child text data\n\n\t\t\tdef __len__(self):\n\t\t\t\t# treat single element as a list of 1\n\t\t\t\treturn 1\n\n\t\t\tdef __getitem__(self, key):\n\t\t\t\tif isinstance(key, basestring):\n\t\t\t\t\treturn self._attrs.get(key,None)\n\t\t\t\telse:\n\t\t\t\t\treturn [self][key]\n\n\t\t\tdef __contains__(self, name):\n\t\t\t\treturn self._attrs.has_key(name)\n\n\t\t\tdef __nonzero__(self):\n\t\t\t\treturn bool(self._attrs or self.data)\n\n\t\t\tdef __getattr__(self, name):\n\t\t\t\tif name.startswith('__'):\n\t\t\t\t\t# need to do this for Python special methods???\n\t\t\t\t\traise AttributeError(name)\n\t\t\t\treturn self._attrs.get(name,None)\n\n\t\t\tdef _add_xml_attr(self, name, value):\n\t\t\t\tif name in self._attrs:\n\t\t\t\t\t\t# multiple attribute of the same name are represented by a list\n\t\t\t\t\t\tchildren = self._attrs[name]\n\t\t\t\t\t\tif not isinstance(children, list):\n\t\t\t\t\t\t\tchildren = [children]\n\t\t\t\t\t\t\tself._attrs[name] = children\n\t\t\t\t\t\tchildren.append(value)\n\t\t\t\telse:\n\t\t\t\t\tself._attrs[name] = value\n\n\t\t\tdef __str__(self):\n\t\t\t\treturn self.data or ''\n\n\t\t\tdef __repr__(self):\n\t\t\t\titems = sorted(self._attrs.items())\n\t\t\t\tif self.data:\n\t\t\t\t\titems.append(('data', self.data))\n\t\t\t\treturn u'{%s}' % ', '.join([u'%s:%s' % (k,repr(v)) for k,v in items])\n\n\t\tclass TreeBuilder(xml.sax.handler.ContentHandler):\n\t\t\tdef __init__(self):\n\t\t\t\tself.stack = []\n\t\t\t\tself.root = DataNode()\n\t\t\t\tself.current = self.root\n\t\t\t\tself.text_parts = []\n\t\t\t\tself.publicObjects = {}\n\n\t\t\tdef startElement(self, name, attrs):\n\t\t\t\tself.stack.append((self.current, self.text_parts))\n\t\t\t\tself.current = DataNode()\n\t\t\t\tself.text_parts = []\n\t\t\t\t# xml attributes --> python attributes\n\t\t\t\tfor k, v in attrs.items():\n\t\t\t\t\t# Register PublicObject in lookup map\n\t\t\t\t\tif k == \"publicID\":\n\t\t\t\t\t\tself.publicObjects[v] = self.current\n\t\t\t\t\tself.current._add_xml_attr(k, v)\n\n\t\t\tdef endElement(self, name):\n\t\t\t\ttext = ''.join(self.text_parts).strip()\n\t\t\t\tif text:\n\t\t\t\t\tself.current.data = text\n\t\t\t\tif self.current._attrs:\n\t\t\t\t\tobj = self.current\n\t\t\t\telse:\n\t\t\t\t\t# a text only node is simply represented by the string\n\t\t\t\t\tobj = text or ''\n\t\t\t\t\t# try to store the object as float if possible\n\t\t\t\t\ttry: obj = float(obj)\n\t\t\t\t\texcept: pass\n\t\t\t\tself.current, self.text_parts = 
self.stack.pop()\n\t\t\t\tself.current._add_xml_attr(name, obj)\n\n\t\t\tdef characters(self, content):\n\t\t\t\tself.text_parts.append(content)\n\n\t\tbuilder = TreeBuilder()\n\t\tif isinstance(src,basestring):\n\t\t\txml.sax.parseString(src, builder)\n\t\telse:\n\t\t\txml.sax.parse(src, builder)\n\t\treturn builder", "def parsexml(self):\n raise NotImplementedError", "def load(xml):\n if isinstance(xml, XmlReader):\n for n in _process(xml): yield n\n else:\n with XmlReader.Create(xml) as xr:\n for n in _process(xr): yield n", "def run(self, xml, **kwargs):\n kwargs['output'] = self.__graph__()\n if isinstance(xml, str):\n try:\n self.source = etree.XML(xml)\n except ValueError:\n try:\n self.source = etree.XML(xml.encode())\n except:\n raise ValueError(\"Cannot run error {}\".format(sys.exc_info()[0]))\n else:\n self.source = xml\n super(XMLProcessor, self).run(**kwargs)\n self.output = kwargs['output']\n return kwargs['output']", "def _convert(self):\n root = cElementTree.fromstring(self.html)\n for el in root.getiterator():\n if el in self.visited:\n continue\n self.visited.update([el])\n if el.tag == 'p':\n parser = ParagraphParser(el)\n self.document_state.append(parser.tag)\n self.visited.update(el.getiterator())", "def preprocess(self):\n # Validate the root element type if the subclass wants us to.\n # This is hard to do elsewhere, since the element handlers don't\n # know where they are in the XML document.\n if self.requiredRootElement is not None:\n rootElement = None\n if self.xml.nodeType == self.xml.DOCUMENT_NODE:\n rootElement = self.xml.documentElement\n elif self.xml.nodeType == self.xml.ELEMENT_NODE:\n rootElement = self.xml\n\n if (not rootElement) or rootElement.nodeName != self.requiredRootElement:\n raise UnknownElementError(\"Missing a required %r root element\" %\n self.requiredRootElement)\n\n setattr(self, self.resultAttribute, self.parse(self.xml))", "def adaptXmlToPython(self, *args):\n return _SALOMERuntime.RuntimeSALOME_adaptXmlToPython(self, *args)", "def translate_xml(self):\n self._from_origin_to_dict()\n self._from_dict_to_destination()\n return self", "def adaptXmlToXml(self, *args):\n return _SALOMERuntime.RuntimeSALOME_adaptXmlToXml(self, *args)", "def complete_xml_parsing(self):\n for item in self.entities:\n item.severity = self.parsed_severity\n item.cwes.extend(self.parsed_cwes)\n item.advisory_id = self.parsed_advisory_id\n item.attack_vector = self.parsed_attack_vector\n if self.parsed_cvss_base != '' and is_correct_score(self.parsed_cvss_base):\n cvss_v3 = CvssV3(base_sc=self.parsed_cvss_base)\n if self.parsed_cvss_temporal != '' \\\n and is_correct_score(self.parsed_cvss_temporal):\n cvss_v3.temporal_sc = self.parsed_cvss_temporal\n item.cvss_v3 = cvss_v3\n item.cvss_base_sc_v3 = self.parsed_cvss_base\n item.cvss_temporal_score_v3 = self.parsed_cvss_temporal\n item.published = self.parsed_date", "def do_post_parse_xml(self, *args, **kwargs): # real signature unknown\n pass", "def convert(self, vroot, entry_variables):\n\n for converter in self.converters:\n vroot = converter.convert(vroot, entry_variables)\n return vroot", "def _from_tree_to_etree(self):\n categories = self.tree.get_children('')\n# messagebox.showwarning('_from_tree_to_etree', \\\n# 'categories={}'.format(categories))\n for category in categories:\n \n acategory = etree.SubElement(self.trout, self.tree.item(category)['text'])\n if category =='approved':\n acategory.set('tags', \"('approved',)\")\n elif category =='conflicts':\n acategory.set('tags', \"('conflicts',)\")\n elif 
category =='suggestions':\n acategory.set('tags', \"('suggestions',)\")\n elif category =='unknown':\n acategory.set('tags', \"('unknown',)\")\n elif category =='cldr':\n acategory.set('tags', \"('cldr',)\")\n else:\n messagebox.showerror('_from_tree_to_etree', \\\n 'unrecognised category >{}<'.format(category))\n return\n# acategory.text = self.tree.item(category)['text']\n sons = self.tree.get_children(category)\n# messagebox.showwarning('_from_tree_to_etree', \\\n# '{}, sons={}'.format(category, sons))\n for son in sons:\n ason = etree.SubElement(acategory, son)\n# ason.text = self.tree.item(son)['text']\n ason.set('values', '{}'.format(self.tree.item(son)['values']))\n ason.set('tags', '{}'.format(tuple(self.tree.item(son)['tags'])))\n grandsons = self.tree.get_children(son)\n for grandson in grandsons:\n agrandson = etree.SubElement(ason, grandson)\n agrandson.text = self.tree.item(grandson)['text']\n agrandson.set('values', \\\n '{}'.format(self.tree.item(grandson)['values']))\n agrandson.set('tags', \\\n '{}'.format(tuple(self.tree.item(grandson)['tags'])))\n# grandsons = self.tree.get_children(grandson)\n# messagebox.showwarning('','{}'.format(etree.tostring(self.trout, \\\n# encoding='unicode', \\\n# pretty_print=True)))\n# messagebox.showwarning('_from_tree_to_etree', \\\n# 'filled with {} categories'.\\\n# format([child.tag for child in self.trout]))\n return self.trout", "def loads(xtext):\n\n class XmlLoads(object):\n \"\"\"\n XMLLOADS implements 'YZXml.loads' functionality\n \"\"\"\n\n def __init__(self, xtext):\n self.index = 0\n self.xtext = xtext\n self.length = len(xtext)\n self.stack = []\n\n def loads(self):\n \"\"\"\n LOADS converts xml-string to object-string\n\n Returns\n -------\n - dict\n object-xml contains {'tag': str, 'attribs': dict, 'elements': list}\n \"\"\"\n\n self.ignore_whitespaces()\n\n if self.index >= self.length:\n return\n\n if self.xtext[self.index] == '<':\n self.index += 1\n\n if self.xtext[self.index] == '/':\n self.index += 1\n\n tag = self.read_until('>')\n self.index += 1\n\n elements = []\n while len(self.stack) > 0 and\\\n (isinstance(self.stack[-1], str) or self.stack[-1]['tag'] != tag):\n elements.append(self.stack.pop())\n\n assert len(self.stack) > 0\n\n self.stack[-1]['elements'].extend(reversed(elements))\n\n else:\n self.ignore_whitespaces()\n tag = self.read_until(' >')\n\n attribs = {}\n if self.xtext[self.index] != '>':\n attribs = self.read_attribs()\n\n self.index += 1\n self.stack.append({'tag': tag, 'attribs': attribs, 'elements': []})\n else:\n self.stack.append(self.read_until('<').strip())\n\n self.loads()\n\n\n def ignore_whitespaces(self):\n \"\"\"\n IGNORE_WHITESPACES reads whitespaces and advances self.index\n \"\"\"\n\n whitespaces = [' ', '\\t', '\\n', '\\r']\n while self.index < self.length and self.xtext[self.index] in whitespaces:\n self.index += 1\n\n def read_until(self, chars):\n \"\"\"\n READ_UNTIL reads charaters and advances self.index\n unitl reaches any character in 'cahrs'\n\n Parameters\n ----------\n - chars: str\n stoping characters\n \"\"\"\n\n start_index = self.index\n\n while self.index < self.length and self.xtext[self.index] not in chars:\n self.index += 1\n\n assert self.index < self.length\n\n return self.xtext[start_index:self.index]\n\n def read_attribs(self):\n \"\"\"\n READ_ATTRIBS reads attributes of an elements\n \"\"\"\n\n attribs = {}\n while self.index < self.length:\n self.ignore_whitespaces()\n if self.xtext[self.index] == '>':\n break\n name = self.read_until('=')\n self.index += 1\n 
self.read_until('\"')\n self.index += 1\n value = self.read_until('\"')\n self.index += 1\n\n attribs[name] = value\n\n return attribs\n\n xmlloads = XmlLoads(xtext)\n xmlloads.loads()\n return xmlloads.stack.pop()", "def try_convert(self, func, *args, **kwargs):\n try:\n func(*args, **kwargs)\n except UnitsError, e:\n if self.show_xml_context_only:\n e.show_xml_context_only()\n if self.warn_only:\n e.warn = True\n e.level = logging.WARNING\n logging.getLogger('units-converter').log(e.level, unicode(e).encode('UTF-8'))", "def run(self):\n context = etree.iterparse(self.tweets_file, events=('end',), tag=self.tweet_node_name, encoding=\"UTF-8\")\n root = etree.Element(self.root_node_name)\n\n for action, tweet in context:\n tweetText = tweet.findtext(self.text_node_name)\n try:\n converter_node = self._run_converters(tweetText)\n tweet.append(converter_node)\n root.append(deepcopy(tweet))\n except:\n pass\n\n tweet.clear()\n\n # Also eliminate now-empty references from the root node to <Title>\n while tweet.getprevious() is not None:\n del tweet.getparent()[0]\n\n newFile = open(self.dest_file, \"w\")\n newFile.write(etree.tostring(root, pretty_print=True, xml_declaration=True, encoding=\"UTF-8\"))\n newFile.close()", "def iter_encode(self, obj, validation='lax', **kwargs):\n errors = []\n\n try:\n converter = kwargs['converter']\n except KeyError:\n converter = kwargs['converter'] = self.schema.get_converter(**kwargs)\n else:\n if not isinstance(converter, XMLSchemaConverter):\n converter = kwargs['converter'] = self.schema.get_converter(**kwargs)\n\n try:\n level = kwargs['level']\n except KeyError:\n level = 0\n element_data = converter.element_encode(obj, self, level)\n if not self.is_matching(element_data.tag, self.default_namespace):\n errors.append(\"data tag does not match XSD element name\")\n\n if 'max_depth' in kwargs and kwargs['max_depth'] == 0:\n for e in errors:\n yield self.validation_error(validation, e, **kwargs)\n return\n else:\n element_data = converter.element_encode(obj, self, level)\n\n text = None\n children = element_data.content\n attributes = ()\n\n xsd_type = self.get_type(element_data)\n if XSI_TYPE in element_data.attributes:\n type_name = element_data.attributes[XSI_TYPE].strip()\n try:\n xsd_type = self.maps.get_instance_type(type_name, xsd_type, converter)\n except (KeyError, TypeError) as err:\n errors.append(err)\n else:\n default_namespace = converter.get('')\n if default_namespace and xsd_type.attributes:\n # Adjust attributes mapped into default namespace\n\n ns_part = '{%s}' % default_namespace\n for k in list(element_data.attributes):\n if not k.startswith(ns_part):\n continue\n elif k in xsd_type.attributes:\n continue\n\n local_name = k[len(ns_part):]\n if local_name in xsd_type.attributes:\n element_data.attributes[local_name] = element_data.attributes[k]\n del element_data.attributes[k]\n\n attribute_group = self.get_attributes(xsd_type)\n for result in attribute_group.iter_encode(element_data.attributes, validation, **kwargs):\n if isinstance(result, XMLSchemaValidationError):\n errors.append(result)\n else:\n attributes = result\n\n if XSI_NIL in element_data.attributes:\n xsi_nil = element_data.attributes[XSI_NIL].strip()\n if not self.nillable:\n errors.append(\"element is not nillable.\")\n elif xsi_nil not in {'0', '1', 'true', 'false'}:\n errors.append(\"xsi:nil attribute must has a boolean value.\")\n elif xsi_nil in ('0', 'false'):\n pass\n elif self.fixed is not None:\n errors.append(\"xsi:nil='true' but the element has a fixed 
value.\")\n elif element_data.text is not None or element_data.content:\n errors.append(\"xsi:nil='true' but the element is not empty.\")\n else:\n elem = converter.etree_element(element_data.tag, attrib=attributes, level=level)\n for e in errors:\n yield self.validation_error(validation, e, elem, **kwargs)\n yield elem\n return\n\n if xsd_type.is_simple():\n if element_data.content:\n errors.append(\"a simpleType element can't has child elements.\")\n\n if element_data.text is not None:\n for result in xsd_type.iter_encode(element_data.text, validation, **kwargs):\n if isinstance(result, XMLSchemaValidationError):\n errors.append(result)\n else:\n text = result\n\n elif self.fixed is not None:\n text = self.fixed\n elif self.default is not None and kwargs.get('use_defaults'):\n text = self.default\n\n elif xsd_type.has_simple_content():\n if element_data.text is not None:\n for result in xsd_type.content.iter_encode(element_data.text,\n validation, **kwargs):\n if isinstance(result, XMLSchemaValidationError):\n errors.append(result)\n else:\n text = result\n\n elif self.fixed is not None:\n text = self.fixed\n elif self.default is not None and kwargs.get('use_defaults'):\n text = self.default\n\n else:\n for result in xsd_type.content.iter_encode(element_data, validation, **kwargs):\n if isinstance(result, XMLSchemaValidationError):\n errors.append(result)\n elif result:\n text, children = result\n\n elem = converter.etree_element(element_data.tag, text, children, attributes, level)\n\n if errors:\n for e in errors:\n yield self.validation_error(validation, e, elem, **kwargs)\n yield elem\n del element_data", "def test_xml_nodes(self):\n try:\n import xmltodict\n except ImportError:\n raise unittest.SkipTest(\"Missing dependency xmltodict.\")\n\n n1 = nodes.XMLToPython()\n n2 = nodes.PythonToXML()\n\n channel = FakeChannel(self.loop)\n\n n1.channel = channel\n n2.channel = channel\n\n m = generate_msg()\n\n m.payload = '<?xml version=\"1.0\" encoding=\"utf-8\"?>\\n<test>hello</test>'\n\n base = str(m.payload)\n\n ret = self.loop.run_until_complete(n1.handle(m))\n ext_new = self.loop.run_until_complete(n2.handle(ret))\n # Check return\n self.assertTrue(isinstance(ret, message.Message))\n self.assertEqual(base, ext_new.payload, \"XML nodes not working !\")", "def process_xml(self):\n self.process_gpx_file(str(self.filename))", "def wp2fields(xml, wp_custpost=False):\r\n\r\n items = get_items(xml)\r\n for item in items:\r\n\r\n if item.find('status').string == \"publish\":\r\n\r\n try:\r\n # Use HTMLParser due to issues with BeautifulSoup 3\r\n title = HTMLParser().unescape(item.title.contents[0])\r\n except IndexError:\r\n title = 'No title [%s]' % item.find('post_name').string\r\n logger.warning('Post \"%s\" is lacking a proper title' % title)\r\n\r\n filename = item.find('post_name').string\r\n post_id = item.find('post_id').string\r\n filename = get_filename(filename, post_id)\r\n\r\n content = item.find('encoded').string\r\n raw_date = item.find('post_date').string\r\n date_object = time.strptime(raw_date, \"%Y-%m-%d %H:%M:%S\")\r\n date = time.strftime(\"%Y-%m-%d %H:%M\", date_object)\r\n author = item.find('creator').string\r\n\r\n categories = [cat.string for cat in item.findAll('category', {'domain' : 'category'})]\r\n # caturl = [cat['nicename'] for cat in item.find(domain='category')]\r\n\r\n tags = [tag.string for tag in item.findAll('category', {'domain' : 'post_tag'})]\r\n\r\n kind = 'article'\r\n post_type = item.find('post_type').string\r\n if post_type == 'page':\r\n kind = 
'page'\r\n elif wp_custpost:\r\n if post_type == 'post':\r\n pass\r\n # Old behaviour was to name everything not a page as an article.\r\n # Theoretically all attachments have status == inherit so\r\n # no attachments should be here. But this statement is to\r\n # maintain existing behaviour in case that doesn't hold true.\r\n elif post_type == 'attachment':\r\n pass\r\n else:\r\n kind = post_type\r\n yield (title, content, filename, date, author, categories, tags,\r\n kind, \"wp-html\")", "def test_unhandled_xml_components():\n\n # Test when type of reaction is unhandled\n xml_filename = \"tests/test_xml_files/unhandled_rxn.xml\"\n with pytest.raises(NotImplementedError):\n parser = XMLParser(xml_filename)\n\n # Test when reaction rate coefficient is unhandled\n xml_filename = \"tests/test_xml_files/unhandled_k.xml\"\n with pytest.raises(NotImplementedError):\n parser = XMLParser(xml_filename)\n\n # Test when units are undhandled\n with pytest.raises(NotImplementedError):\n xml_filename = \"tests/test_xml_files/madeup_units_4_A_arr.xml\"\n parser = XMLParser(xml_filename, convert_to_SI_units=True)\n\n with pytest.raises(NotImplementedError):\n xml_filename = \"tests/test_xml_files/madeup_units_4_A_mod_arr.xml\"\n parser = XMLParser(xml_filename, convert_to_SI_units=True)\n\n with pytest.raises(NotImplementedError):\n xml_filename = \"tests/test_xml_files/madeup_units_4_E_arr.xml\"\n parser = XMLParser(xml_filename, convert_to_SI_units=True)\n\n with pytest.raises(NotImplementedError):\n xml_filename = \"tests/test_xml_files/madeup_units_4_E_mod_arr.xml\"\n parser = XMLParser(xml_filename, convert_to_SI_units=True)", "def process(self, *args, **kwargs):\n for name in self.plugin:\n if not self.plugin[name].post_inited:\n self.plugin[name].post_init()\n return XMLStream.process(self, *args, **kwargs)", "def unwrap(xml_file, missing_message=\"NO TRANSLATION AVAILABLE\"):\n tree = ET.parse(xml_file) \n\n # Find and check the source langs, ref langs and translators\n src_langs, ref_langs, translators = set(), set(), set()\n\n for src_doc in tree.getroot().findall(\".//src\"):\n src_langs.add(src_doc.get(\"lang\"))\n\n for ref_doc in tree.getroot().findall(\".//ref\"):\n ref_langs.add(ref_doc.get(\"lang\"))\n translator = ref_doc.get(\"translator\")\n if translator: translators.add(translator)\n \n if len(src_langs) > 1:\n raise RuntimeError(\"Multiple source languages found\")\n\n if len(src_langs) == 0:\n raise RuntimeError(\"No source languages found\")\n\n src_lang = src_langs.pop()\n src = []\n\n if len(ref_langs) > 1:\n raise RuntimeError(\"Multiple reference languages found -- this case is not currently handled\")\n\n\n if len(ref_langs) > 0:\n if len(translators) == 0:\n LOG.info(\"No translator identifiers found -- reading first translation for each document\")\n translators.add(DEFAULT_TRANSLATOR)\n ref_lang = ref_langs.pop()\n ref = {translator : [] for translator in translators}\n else:\n LOG.info(\"No references found\")\n ref_lang = None\n ref = {}\n\n\n # Extract text\n src_sent_count,doc_count = 0,0\n for doc in tree.getroot().findall(\".//doc\"):\n doc_count += 1\n src_sents = {int(seg.get(\"id\")): seg.text for seg in doc.findall(\".//src//seg\")}\n if ref_lang: \n ref_docs = doc.findall(\".//ref\")\n trans_to_ref = {}\n\n # If no translator identifiers, we just read one reference (if any) \n # If there are translator identifiers, we add a reference for each translator\n\n def get_ref_sents(ref_doc):\n return {int(seg.get(\"id\")): seg.text for seg in 
ref_doc.findall(f\".//seg\")}\n\n if len(translators) == 1 and DEFAULT_TRANSLATOR in translators:\n if len(ref_docs):\n trans_to_ref[DEFAULT_TRANSLATOR] = get_ref_sents(ref_docs[0])\n else:\n trans_to_ref[DEFAULT_TRANSLATOR] = {}\n else:\n trans_to_ref = {ref_doc.get(\"translator\"): get_ref_sents(ref_doc) for ref_doc in ref_docs}\n\n for seg_id in sorted(src_sents.keys()):\n src.append(src_sents[seg_id])\n src_sent_count += 1\n if ref_lang:\n for translator in translators:\n ref[translator].append(trans_to_ref.get(translator, {translator: {}}).get(seg_id, missing_message))\n\n LOG.info(f\"Extracted {doc_count} document(s) containing {src_sent_count} sentences in {src_lang}\")\n\n\n return src_lang,src,ref_lang,ref", "def parse(self, xml_input):\n try:\n return self._parse_using_etree(xml_input)\n except ImportError:\n # No xml.etree.ccElementTree found.\n return self._parse_using_sax_parser(xml_input)", "def _convert_all(self, ast, label, idlnode_ctor):\n res = []\n found = self._find_all(ast, label)\n if not found:\n return res\n if not isinstance(found, list):\n raise RuntimeError(\"Expected list but %s found\" % type(found))\n for childAst in found:\n converted = idlnode_ctor(childAst)\n res.append(converted)\n return res", "def cleanup(self):\n for element in self.root.iter():\n element.tag = element.tag.partition('}')[-1]", "def from_xml(cls, xml_data, system, id_generator):\r\n raise NotImplementedError('Modules must implement from_xml to be parsable from xml')" ]
[ "0.54154575", "0.5362358", "0.5302721", "0.52521217", "0.5242494", "0.5195388", "0.51844907", "0.5166652", "0.51354116", "0.5108349", "0.50608504", "0.50423545", "0.50373363", "0.5032745", "0.5002228", "0.49851438", "0.49840027", "0.48968914", "0.48965812", "0.4865027", "0.48599917", "0.48551208", "0.48486394", "0.48443446", "0.48165762", "0.4811093", "0.4809542", "0.4808497", "0.48017892", "0.47934797" ]
0.7087699
0
_clear_child_ deletes this object's 'child_' attribute, then loops through all this object's attributes, and recursively calls the _clear_child_ method of any child_ attributes
def _clear_child_(self): try: del self.child_ except AttributeError, e: pass for attr in filter(self._child_re.match, dir(self)): try: getattr(self, attr)._clear_child_() except AttributeError: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cleanup(child):\n children = child.get('children', [])\n for childchild in children:\n cleanup(childchild)\n cleaned = {u'title': child['Title'], u'name': child['id'],\n u'children': children}\n child.clear()\n child.update(cleaned)", "def deep_clean(self, render=False):\n if self.cube_axes_actor is not None:\n self.cube_axes_actor = None\n\n if hasattr(self, 'edl_pass'):\n del self.edl_pass\n if hasattr(self, '_box_object'):\n self.remove_bounding_box(render=render)\n if hasattr(self, '_shadow_pass') and self._shadow_pass is not None:\n self.disable_shadows()\n try:\n if self.__charts is not None:\n self.__charts.deep_clean()\n self.__charts = None\n except AttributeError: # pragma: no cover\n pass\n\n self._render_passes.deep_clean()\n self.remove_floors(render=render)\n self.remove_legend(render=render)\n self.RemoveAllViewProps()\n self._actors = {}\n self._camera = None\n self._bounding_box = None\n self._marker_actor = None\n self._border_actor = None\n # remove reference to parent last\n self.parent = None", "def clear_attrs(self):\n self._attributes.clear()", "def reset(self):\n self.children.clear()", "def reset_spoofed_children(self):\n for child_type in self._spoofed_children.values():\n for child in child_type:\n self.remove_component(child)\n self._spoofed_children = {} # Unecessary?", "def _reset_cache(self):\n self._cache = None\n for child in self.children: # pylint: disable=E1101\n child._reset_cache()", "def clean_up(self):\n while len(self.__refs_for_deletion): \n attr = self.__refs_for_deletion.pop()\n obj = getattr(self, attr)\n if hasattr(obj, 'clean_up'):\n obj.clean_up()\n delattr(self, attr)", "def remove_and_preserve_children(self):\r\n \r\n if self.parent and hasattr(self.parent, 'add_child)'):\r\n for child in self._children:\r\n #Correct offsets\r\n child.x += self.x\r\n child.y += self.y\r\n self.parent.add_child(child)\r\n self._children = []\r\n self.remove()", "def _clear_node(self):\n self._element = None\n self._parent = None\n self._leftchild = None\n self._rightchild = None\n self._height = None", "def reset(self):\r\n self.key = None\r\n self.value = None\r\n self.parent = None\r\n self.left_child = None\r\n self.right_child = None\r\n self.color = BLACK\r\n self.size_tree = 0", "def _reset(self, _recursive=False):\n if self._isDefault:\n self.options = self.__class__.options.copy()\n if _recursive:\n for child in self._children:\n child.reset()\n else:\n self.options = self._defaultInstance.options.copy()", "def clear_cached_attributes(self):\n setattr(self, '_atoms', None)\n setattr(self, '_bonds', None)\n setattr(self, '_rings', None)\n setattr(self, '_ring_systems', None)", "def clear_attributes(self):\n self.attrs = etad.AttributeContainer()", "def del_child(self, child):\n\n try:\n self.children.remove(child)\n except ValueError:\n pass\n else:\n self.rebuild_children_dict()", "def reset(self):\n for c in self.children:\n c.reset()\n self.marked = False", "def child_removed(self, child):\n super(AbstractItemView, self).child_removed(child)\n self.get_member(\"_items\").reset(self)", "def unload(self) -> None:\n for attr in self._attrs:\n setattr(self, attr, None)", "def delete(self):\n\n # TODO find a way to remove this when sub-classing in HCRoot\n self.parent.del_child(self)", "def reset(self):\n for parent in self.GetParents():\n parent.reset()", "def UnSetChild(self, *args):\n return _XCAFDoc.XCAFDoc_GraphNode_UnSetChild(self, *args)", "def reset(self) -> None:\r\n self.tree.delete(*self.tree.get_children())", "def remove_child(self, 
child_id):\r\n self.children = [ c for c in self.children if c.id!= child_id ]", "def remove_child(self, child):\n\n self.children.remove(child)", "def reset(self):\r\n instdict = self.__dict__\r\n classdict = self.__class__.__dict__\r\n # To reset them, we simply remove them from the instance dict. At that\r\n # point, it's as if they had never been computed. On the next access,\r\n # the accessor function from the parent class will be called, simply\r\n # because that's how the python descriptor protocol works.\r\n for mname, mval in classdict.items():\r\n if mname in instdict and isinstance(mval, OneTimeProperty):\r\n delattr(self, mname)", "def remove_child(self, child):\n if self.is_root:\n return\n self.children.remove(child)\n # pylint: disable=protected-access\n child.__parent = None", "def internal_clean_children(self, course_locator):\r\n original_structure = self._lookup_course(course_locator)['structure']\r\n for block in original_structure['blocks'].itervalues():\r\n if 'fields' in block and 'children' in block['fields']:\r\n block['fields'][\"children\"] = [\r\n block_id for block_id in block['fields'][\"children\"]\r\n if LocMapperStore.encode_key_for_mongo(block_id) in original_structure['blocks']\r\n ]\r\n self.db_connection.update_structure(original_structure)\r\n # clear cache again b/c inheritance may be wrong over orphans\r\n self._clear_cache(original_structure['_id'])", "def removeChild(self, child):\n child.parents.remove(self)\n self.children.remove(child)", "def clear(self):\n\t\tself._root = None\n\t\tself._size = 0\n\t\tself._depth = 0\n\t\tself._max_chars = 1\n\t\treturn", "def detach(self):\n for child in self.children:\n child.parents.remove(self)\n for parent in self.parents:\n parent.children.remove(self)\n self.parents.clear()\n self.children.clear()", "def detach(self):\n for child in self.children:\n child.parents.remove(self)\n for parent in self.parents:\n parent.children.remove(self)\n self.parents.clear()\n self.children.clear()" ]
[ "0.7398926", "0.65832293", "0.64992577", "0.64653677", "0.64382654", "0.63919705", "0.63272446", "0.620319", "0.6158226", "0.6155242", "0.61202127", "0.6109521", "0.60912627", "0.60892636", "0.6071417", "0.60680395", "0.60535574", "0.6002126", "0.59893113", "0.5987409", "0.59417963", "0.59280235", "0.5917159", "0.59135056", "0.5882595", "0.5857564", "0.58564174", "0.5846787", "0.5790365", "0.5790365" ]
0.8680944
0
This Resource is about to handle a request. If it wants to delegate to another Resource, it can return it here.
def willHandle(self, request): return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handler(self):\n\t\treturn self.handle_request", "def handle_request(self, request):\n return self._first_handler(self, request)", "def __call__(self, *args, **kwargs):\n\t\treturn self.handler()(self.request(kwargs))", "def process(self, user, request, resource, path, meta):\n raise ValueError('Not implemented')", "def handle_request(self, request, environ, start_response,\n response_headers):\n method = environ[\"REQUEST_METHOD\"].upper()\n try:\n resource, parent_entity = self.get_resource(request)\n if request.path_option == core.PathOption.metadata:\n return self.return_metadata(\n request, environ, start_response, response_headers)\n elif request.path_option == core.PathOption.batch:\n return self.odata_error(\n request, environ, start_response, \"Bad Request\",\n \"Batch requests not supported\", 404)\n elif request.path_option == core.PathOption.count:\n if isinstance(resource, edm.Entity):\n return self.return_count(\n 1, request, environ, start_response, response_headers)\n elif isinstance(resource, edm.EntityCollection):\n resource.set_filter(\n request.sys_query_options.get(\n core.SystemQueryOption.filter,\n None))\n return self.return_count(\n len(resource), request, environ, start_response,\n response_headers)\n else:\n raise core.BadURISegment(\n \"$count must be applied to \"\n \"an EntitySet or single EntityType instance\")\n elif request.path_option == core.PathOption.links:\n # parent_entity will be source entity\n # request.links_property is the name of the navigation\n # property in the source entity\n # resource will be the target entity, a collection or\n # None\n if not isinstance(parent_entity, edm.Entity):\n raise core.BadURISegment(\"$links must be preceded by a \"\n \"single EntityType instance\")\n if method == \"GET\":\n # open the collection and select the key properties only\n if isinstance(resource, edm.EntityCollection):\n with resource as collection:\n collection.select_keys()\n collection.set_page(\n request.sys_query_options.get(\n core.SystemQueryOption.top, None),\n request.sys_query_options.get(\n core.SystemQueryOption.skip, None),\n request.sys_query_options.get(\n core.SystemQueryOption.skiptoken, None))\n inlinecount = request.sys_query_options.get(\n core.SystemQueryOption.inlinecount, None)\n collection.set_inlinecount(\n inlinecount == core.InlineCount.allpages)\n return self.return_links(\n collection, request, environ, start_response,\n response_headers)\n elif isinstance(resource, edm.Entity):\n # should have just a single link\n return self.return_link(\n resource, request, environ, start_response,\n response_headers)\n else:\n # resource is None - no linked entity\n raise core.MissingURISegment(\n \"%s, no entity is related\" %\n request.links_property)\n elif method == \"POST\":\n if resource is None:\n # can you POST to Orders(1)/$links/Customer ? 
- only if\n # it is currently NULL (0..1)\n resource = parent_entity[\n request.links_property].open()\n if isinstance(resource, edm.EntityCollection):\n with resource as collection:\n target_entity = self.read_entity_from_link(environ)\n collection[target_entity.key()] = target_entity\n return self.return_empty(\n start_response, response_headers)\n else:\n # you can't POST to a single link that already exists\n raise core.BadURISegment(\n \"%s is already linked, use PUT \"\n \"instead of POST to update it\" %\n request.links_property)\n elif method == \"PUT\":\n if parent_entity[request.links_property].isCollection:\n raise core.BadURISegment(\n \"%s: can't update a link with multiplicity *\" %\n request.links_property)\n with parent_entity[\n request.links_property].open() as \\\n collection:\n target_entity = self.read_entity_from_link(environ)\n collection.replace(target_entity)\n return self.return_empty(start_response, response_headers)\n elif method == \"DELETE\":\n if isinstance(resource, edm.EntityCollection):\n raise core.BadURISegment(\n \"%s: DELETE must specify a single link\" %\n request.links_property)\n elif resource is None:\n raise core.MissingURISegment(\n \"%s, no entity is related\" %\n request.links_property)\n with parent_entity[\n request.links_property].open() as \\\n collection:\n del collection[resource.key()]\n return self.return_empty(start_response, response_headers)\n else:\n raise core.InvalidMethod(\"%s not supported here\" % method)\n elif isinstance(resource, edm.Entity):\n if method == \"GET\" or method == \"HEAD\":\n if request.path_option == core.PathOption.value:\n if resource.type_def.has_stream():\n return self.return_stream(\n resource, request, environ, start_response,\n response_headers, method)\n else:\n raise core.BadURISegment(\n \"$value cannot be used since \"\n \"the entity is not a media stream\")\n else:\n self.expand_resource(resource,\n request.sys_query_options)\n return self.return_entity(\n resource, request, environ, start_response,\n response_headers)\n elif method == \"PUT\":\n if request.path_option == core.PathOption.value:\n if resource.type_def.has_stream():\n sinfo = core.StreamInfo()\n if \"CONTENT_TYPE\" in environ:\n sinfo.type = params.MediaType.from_str(\n environ[\"CONTENT_TYPE\"])\n input = messages.WSGIInputWrapper(environ)\n with resource.entity_set.open() as coll:\n coll.update_stream(input,\n resource.key(),\n sinfo)\n # need to update the resource as some fields\n # may have changed\n resource = coll[resource.key()]\n self.set_etag(resource, response_headers)\n return self.return_empty(\n start_response, response_headers)\n else:\n raise core.BadURISegment(\n \"$value cannot be used since the entity is \"\n \"not a media stream\")\n else:\n # update the entity from the request\n self.read_entity(resource, environ)\n resource.commit()\n # now we've updated the entity it is safe to calculate\n # the ETag\n self.set_etag(resource, response_headers)\n return self.return_empty(\n start_response, response_headers)\n elif method == \"DELETE\":\n if request.path_option == core.PathOption.value:\n raise core.BadURISegment(\n \"$value cannot be used with DELETE\")\n resource.delete()\n return self.return_empty(start_response, response_headers)\n else:\n raise core.InvalidMethod(\"%s not supported here\" % method)\n elif isinstance(resource, edm.EntityCollection):\n if method == \"GET\":\n self.expand_resource(resource, request.sys_query_options)\n resource.set_filter(\n request.sys_query_options.get(\n 
core.SystemQueryOption.filter,\n None))\n resource.set_orderby(\n request.sys_query_options.get(\n core.SystemQueryOption.orderby,\n None))\n resource.set_page(\n request.sys_query_options.get(\n core.SystemQueryOption.top, None),\n request.sys_query_options.get(\n core.SystemQueryOption.skip, None),\n request.sys_query_options.get(\n core.SystemQueryOption.skiptoken, None))\n inlinecount = request.sys_query_options.get(\n core.SystemQueryOption.inlinecount, None)\n resource.set_inlinecount(\n inlinecount == core.InlineCount.allpages)\n return self.return_entity_collection(\n resource, request, environ, start_response,\n response_headers)\n elif (method == \"POST\" and\n resource.is_medialink_collection()):\n # POST of a media resource\n sinfo = core.StreamInfo()\n if \"CONTENT_TYPE\" in environ:\n sinfo.type = params.MediaType.from_str(\n environ[\"CONTENT_TYPE\"])\n if \"HTTP_LAST_MODIFIED\" in environ:\n sinfo.modified = params.FullDate.from_http_str(\n environ[\"HTTP_LAST_MODIFIED\"])\n input = messages.WSGIInputWrapper(environ)\n if \"HTTP_SLUG\" in environ:\n slug = app.Slug(environ[\"HTTP_SLUG\"])\n # if the slug is a bracketed string treat it\n # as the key predicate\n key = None\n kp = slug.slug.strip()\n if kp and kp[0] == '(' and kp[-1] == ')':\n try:\n name, kp = core.ODataURI.split_segment(kp)\n # kp is a dictionary for the entity key\n key = resource.entity_set.get_key(kp)\n except ValueError:\n pass\n if not key:\n key = resource.entity_set.extract_key(slug.slug)\n else:\n slug = key = None\n entity = resource.new_stream(input, sinfo=sinfo, key=key)\n if slug:\n for k, v in entity.data_items():\n # catch property-level feed customisation here\n property_def = entity.type_def[k]\n if (property_def.get_target_path() ==\n [(atom.ATOM_NAMESPACE, \"title\")]):\n entity[k].set_from_value(slug.slug)\n resource.update_entity(entity)\n break\n response_headers.append(\n ('Location', str(entity.get_location())))\n return self.return_entity(\n entity, request, environ, start_response,\n response_headers, 201, \"Created\")\n elif method == \"POST\":\n # POST to an ordinary entity collection\n entity = resource.new_entity()\n # read the entity from the request\n self.read_entity(entity, environ)\n resource.insert_entity(entity)\n response_headers.append(\n ('Location', str(entity.get_location())))\n return self.return_entity(\n entity, request, environ, start_response,\n response_headers, 201, \"Created\")\n else:\n raise core.InvalidMethod(\"%s not supported here\" % method)\n elif isinstance(resource, edm.EDMValue):\n if method == \"GET\":\n if request.path_option == core.PathOption.value:\n if resource:\n return self.return_dereferenced_value(\n parent_entity, resource, request, environ,\n start_response, response_headers)\n else:\n raise core.MissingURISegment(\n \"%s (NULL)\" % resource.p_def.name)\n else:\n return self.return_value(\n parent_entity, resource, request, environ,\n start_response, response_headers)\n elif method == \"PUT\":\n if request.path_option == core.PathOption.value:\n if resource:\n self.read_dereferenced_value(resource, environ)\n else:\n raise core.MissingURISegment(\n \"%s (NULL)\" % resource.p_def.name)\n else:\n self.read_value(resource, environ)\n parent_entity.commit()\n self.set_etag(parent_entity, response_headers)\n return self.return_empty(start_response, response_headers)\n elif method == \"DELETE\":\n if request.path_option == core.PathOption.value:\n raise core.BadURISegment(\n \"$value cannot be used with DELETE\")\n # make this one NULL, only 
if it is nullable\n if resource.p_def and not resource.p_def.nullable:\n raise core.InvalidMethod(\n \"DELETE failed, %s property is not nullable\" %\n resource.p_def.name)\n resource.value = None\n parent_entity.commit()\n return self.return_empty(start_response, response_headers)\n else:\n raise core.InvalidMethod(\"%s not supported here\" % method)\n elif isinstance(resource, edm.FunctionCollection):\n return self.return_collection(\n resource, request, environ, start_response,\n response_headers)\n else:\n # None or the DataService object: means we are trying to get\n # the service root\n response_type = self.content_negotiation(\n request, environ, self.ServiceRootTypes)\n if response_type is None:\n return self.odata_error(\n request, environ, start_response, \"Not Acceptable\",\n 'atomsvc+xml or json formats supported', 406)\n elif response_type == \"application/json\":\n return self.return_json_root(\n request, environ, start_response, response_headers)\n else:\n # override the default handling of service root to improve\n # content negotiation\n data = to_text(self.serviceDoc).encode('utf-8')\n response_headers.append(\n (\"Content-Type\", str(response_type)))\n response_headers.append((\"Content-Length\", str(len(data))))\n start_response(\"200 Ok\", response_headers)\n return [data]\n except core.MissingURISegment as e:\n return self.odata_error(\n request, environ, start_response, \"Resource not found\",\n \"Resource not found for segment %s\" % str(e), 404)\n except core.BadURISegment as e:\n return self.odata_error(\n request, environ, start_response, \"Bad Request\",\n \"Resource not found for segment %s\" % str(e), 400)\n except edm.NavigationError as e:\n return self.odata_error(\n request, environ, start_response, \"NavigationError\", str(e),\n 403)\n except edm.ConstraintError as e:\n return self.odata_error(\n request, environ, start_response, \"ConstraintError\", str(e),\n 403)\n except NotImplementedError as e:\n return self.odata_error(\n request, environ, start_response, \"NotImplementedError\",\n str(e), 405)", "def handle_request(self, path=None):\n req = get_request()\n resp = super().handle_request(req)\n return to_response(resp)", "def dispatch(self, request, **resources):\r\n\r\n # Fix PUT and PATH methods in Django request\r\n request = fix_request(request)\r\n\r\n # Set self identifier\r\n self.identifier = request.META.get('REMOTE_ADDR', 'anonymous')\r\n\r\n # Send ADREST started signal\r\n api_request_started.send(self, request=request)\r\n\r\n # Send current api started signal\r\n if self.api:\r\n self.api.request_started.send(self, request=request)\r\n\r\n try:\r\n\r\n # Check request method\r\n self.check_method_allowed(request)\r\n\r\n # Authentificate\r\n self.authenticate(request)\r\n\r\n # Throttle check\r\n self.throttle_check()\r\n\r\n if request.method != 'OPTIONS' or not ADREST_ALLOW_OPTIONS:\r\n\r\n # Parse content\r\n request.data = self.parse(request)\r\n\r\n # Get required resources\r\n resources = self.get_resources(\r\n request, **resources)\r\n\r\n # Check owners\r\n self.check_owners(request, **resources)\r\n\r\n # Check rights for resources with this method\r\n self.check_rights(resources, request=request)\r\n\r\n response = self.handle_request(request, **resources)\r\n\r\n # Serialize response\r\n response = self.emit(response, request=request)\r\n\r\n except Exception as e:\r\n response = self.handle_exception(e, request=request)\r\n\r\n response[\"Allow\"] = ', '.join(self._meta.allowed_methods)\r\n response[\"Vary\"] = 
'Authenticate, Accept'\r\n\r\n # Send errors on mail\r\n adrest_errors_mail(response, request)\r\n\r\n # Send finished signal\r\n api_request_finished.send(\r\n self, request=request, response=response, **resources)\r\n\r\n # Send finished signal in API context\r\n if self.api:\r\n self.api.request_finished.send(\r\n self, request=request, response=response, **resources)\r\n\r\n return response", "def handle_request(self, request, **resources):\r\n if not request.method in self._meta.callmap.keys():\r\n raise HttpError(\r\n 'Unknown or unsupported method \\'%s\\'' % request.method,\r\n status=status.HTTP_501_NOT_IMPLEMENTED)\r\n\r\n # Get the appropriate create/read/update/delete function\r\n view = getattr(self, self._meta.callmap[request.method])\r\n\r\n # Get function data\r\n return view(request, **resources)", "def _handle(self, *args, **options):\n return super()._handle(*args, **options)", "def dispatch_request(self, *args, **kwargs):\n self.args = args\n self.kwargs = kwargs\n self.meth = request.method.lower()\n self.resource = current_app.blueprints.get(request.blueprint, None)\n\n if not any([self.meth in self.methods, self.meth.upper() in self.methods]):\n return self.return_error(405)\n\n self.process_before_request_hooks()\n\n resp = super(Endpoint, self).dispatch_request(*args, **kwargs)\n resp = self.make_response(resp)\n\n resp = self.process_after_request_hooks(resp)\n\n return resp", "async def handle(self, request: StarletteRequest,\n resource: ResourceInterface,\n **kwargs) -> StarletteResponse:\n\n connection = await self.database.acquire()\n\n try:\n response = await self.run_safe(resource.on_request,\n Request(request),\n connection=connection, logger=None,\n **kwargs)\n finally:\n await connection.close()\n\n if isinstance(response, (BytesResponse, PlainTextResponse)):\n response_class = StarlettePlainTextResponse\n elif isinstance(response, JSONResponse):\n response_class = StarletteUJSONResponse\n elif isinstance(response, SuccessfulResponse):\n return StarlettePlainTextResponse('ok')\n else:\n raise NotImplementedError(\n \"{} are not supported.\".format(type(response)))\n\n return response_class(content=response.body, headers=response.headers,\n media_type=response.media_type,\n status_code=response.status_code)", "def output(self, resource):\n @wraps(resource)\n def wrapper(*args, **kwargs):\n rv = resource(*args, **kwargs)\n rv = self.responder(rv)\n return rv\n\n return wrapper", "def __call__(self, request):\n response = self.get_request(request)\n return response", "def __call__(self, *args, **kwargs):\n\t\tself.args = args\n\t\tself.kwargs = kwargs\n\t\tself.request = request\n\n\t\ttry:\n\t\t\tself.initial(request, *args, **kwargs)\n\n\t\t\t# Get the appropriate handler method\n\t\t\tif request.method.lower() in self.declared_methods:\n\t\t\t\thandler = getattr(self, request.method.lower(), self.http_method_not_allowed)\n\t\t\telse:\n\t\t\t\thandler = self.http_method_not_allowed\n\n\t\t\tresponse = handler(*args, **kwargs)\n\t\t\tif response is None:\n\t\t\t\tresponse = self.dispatch()\n\n\t\texcept Exception as e:\n\t\t\tresponse = self.handle_exception(e)\n\n\t\treturn self.finalize_response(request, response, *args, **kwargs)", "def process_request(self, req, resp, resource, params):", "def process_resource(self, req, resp, resource, params):\n\n # Step 1: for 'rest-based' and 'rest&time-based' eviction strategies the\n # POST/PATCH/PUT/DELETE calls are never cached, they should never be\n # loaded from cache as they must always execute,\n # so for 
those we don't need to try to search the cache\n if self.cache_config['CACHE_EVICTION_STRATEGY'] in [CacheEvictionStrategy.rest_based,\n CacheEvictionStrategy.rest_and_time_based] \\\n and req.method.upper() in [HttpMethods.POST,\n HttpMethods.PATCH,\n HttpMethods.PUT,\n HttpMethods.DELETE]:\n return\n\n # Step 2: determine whether the given responder has caching setup\n # and if not then short-circuit to save on the lookup of request in the cache\n # as anyhow this request was not marked to be cached\n\n # find out which responder (\"on_...\" method) is going to be used to process this request\n responder = None\n for _method in dir(resource):\n if _DECORABLE_METHOD_NAME.match(_method) and _method[3:].upper() == req.method.upper():\n responder = _method\n break\n\n if responder:\n # get the name of the responder wrapper, which for cached objects is 'cache_wrap'\n # see the \"Cache.cache\" decorator in cache.py\n responder_wrapper_name = getattr(getattr(resource, responder), '__name__')\n\n # is the given method (or its class) decorated by the cache_wrap being the topmost decorator?\n if responder_wrapper_name == 'cache_wrap':\n logger.debug(\" This endpoint is decorated by 'cache' being the topmost decorator.\")\n else:\n # 'cache_wrap' is not the topmost decorator - let's check whether 'cache' is\n # any of the other decorator on this method (not the topmost):\n # this requires the use of @register(decor1, decor2) as the decorator\n if hasattr(getattr(resource, responder), '_decorators') and \\\n 'cache' in [d._decorator_name for d in getattr(resource, responder)._decorators\n if hasattr(d, '_decorator_name')]:\n logger.debug(\" This endpoint is decorated by 'cache', but it is NOT the topmost decorator.\")\n else:\n # no cache was requested on this responder as no decorator at all\n logger.debug(\" No 'cache' was requested for this endpoint.\")\n return\n\n # Step 3: look up the record in the cache\n key = self.generate_cache_key(req)\n data = self.cache.get(key)\n\n if data:\n # if the CACHE_CONTENT_TYPE_JSON_ONLY = True, then we are NOT\n # caching the response's Content-Type, only its body\n if self.cache_config['CACHE_CONTENT_TYPE_JSON_ONLY']:\n if FALCONVERSION_MAIN < 3:\n resp.body = self.deserialize(data)\n else:\n resp.text = self.deserialize(data)\n else:\n if FALCONVERSION_MAIN < 3:\n resp.content_type, resp.body = self.deserialize(data)\n else:\n resp.content_type, resp.text = self.deserialize(data)\n resp.status = HTTP_200\n req.context.cached = True\n\n # Short-circuit any further processing to skip any remaining\n # 'process_request' and 'process_resource' methods, as well as\n # the 'responder' method that the request would have been routed to.\n # However, any 'process_response' middleware methods will still be called.\n resp.complete = True", "def dispatch_request(self, *args, **kwargs):\n try:\n return super().dispatch_request(*args, **kwargs)\n except HTTPException as e:\n logger.error(\"HTTP Error on APIResource %s\", e, exc_info=1)\n return return_response({\n \"code\": e.code,\n \"message\": e.description\n }, e.code)\n except BaseException as e:\n logger.error(\"Error occurred in APIResource %s\", e, exc_info=1)\n return return_response({\n \"code\": 500,\n \"message\": str(e)\n }, 500)", "def process_request(self, req):\n return None", "def dispatch(self, request, *args, **kwargs):\n self.args = args\n self.kwargs = kwargs\n request = self.initialize_request(request, *args, **kwargs)\n\n self.request = request\n self.headers = self.default_response_headers # 
deprecate?\n\n try:\n _logger.info('start processing method(%s) path(%s) username(%s)',\n request.method, request.get_full_path(), request.user.username)\n self.initial(request, *args, **kwargs)\n # Get the appropriate handler method\n if request.method.lower() in self.http_method_names:\n handler = getattr(self, request.method.lower(),\n self.http_method_not_allowed)\n else:\n handler = self.http_method_not_allowed\n\n response = handler(request, *args, **kwargs)\n\n except Exception as exc: # pylint: disable=broad-except\n response = self.handle_exception(exc)\n _logger.error('Application error header:%s detail: %s',\n response.data['header'], response.data['detail'],\n exc_info=True)\n\n self.response = self.finalize_response(request, response, *args, **kwargs)\n _logger.info('end processing method(%s) path(%s) username(%s)',\n request.method, request.get_full_path(), request.user.username)\n return self.response", "def dispatch(self, *args, **kwargs):\n return super().dispatch(*args, **kwargs)", "def _request(self, *args, **kwargs):\n request = self._make_request(*args, **kwargs)\n\n return self._collect_request(request)", "def dispatch_any(self, request, handler, *args, **kwargs):\r\n return handler(request, *args, **kwargs)", "def process(r: object) -> Any:\n if isinstance(r, Response):\n r = r._applyToRequest(request)\n\n if IResource.providedBy(r):\n request.render( # type: ignore[attr-defined]\n getChildForRequest(r, request)\n )\n return StandInResource\n\n if IRenderable.providedBy(r):\n renderElement(request, r)\n return StandInResource\n\n return r", "def handle(self):\n raise NotImplementedError", "def _request(self, *args, **kwargs):\n raise NotImplementedError()", "def dispatch(self, request, *args, **kwargs):\n return super().dispatch(request, *args, **kwargs)", "def process_request(self, request):\n return None", "def __call__(self, req):\n return self._router", "def _wrap(self, resource):\n\t\treturn ResourceWrapper(self, resource)", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)" ]
[ "0.67229843", "0.6442055", "0.63741136", "0.62232375", "0.61168444", "0.61049634", "0.6079845", "0.6040411", "0.60218996", "0.6000085", "0.5869706", "0.58631307", "0.58325726", "0.5815255", "0.5810135", "0.5737306", "0.5734817", "0.5728726", "0.5722223", "0.572133", "0.5708564", "0.5686651", "0.56829363", "0.5679624", "0.56299794", "0.5621368", "0.5589249", "0.55797", "0.55699456", "0.5551617" ]
0.67254317
0
post: Override this to handle a form post. Return a URL to be redirected to.
def post(self, request, form): return request.uri()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self, request):\n user = self._authenticate(request)\n is_valid = self._validate_user(request, user)\n if user and is_valid:\n login(request, user)\n url = request.POST.get('next', '/')\n print request.POST\n return redirect(url)\n response = self._return_invalid_message(request)\n return response", "def post(self, request, *args, **kwargs):\n try:\n form = self.get_form()\n except RedirectNeeded as exc:\n messages.add_message(request, messages.SUCCESS, \"Payment redirects to %s\" % exc.args[0])\n return HttpResponseRedirect(exc.args[0])\n #except Exception as exc:\n # return HttpResponseBadRequest(exc, content_type=\"text/plain\")\n\n if form.validate():\n messages.add_message(request, messages.SUCCESS, \"Payment succeeded\")\n return self.form_valid(form)\n else:\n messages.add_message(request, messages.ERROR, \"Payment failed\")\n return self.form_invalid(form)", "def post(self, *args, **kwargs):\n if len(args) != 2:\n raise TypeError('wrong number of arguments')\n kwargs[\"post\"] = args[1]\n return self._geturl.get(args[0], **kwargs)", "def post(self, request, *args, **kwargs):\n form = self.get_form(self.form_class)\n if form.is_valid():\n return self.form_valid(form)\n else:\n return self.form_invalid(form)", "def post(self, *args, **kwargs):\n return self.handle_post_request()", "def post():\n pass", "def form_valid(self, form):\n try:\n form.save(self.request)\n messages.success(self.request, 'Your comment has been posted')\n except exceptions.RapidCommentingError as e:\n messages.error(self.request, e)\n\n return redirect(form.cleaned_data['next'] \\\n or self.request.META['HTTP_REFERER'])", "def post(self, *args, **kwargs):\n self.request(\"post\", *args, **kwargs)", "def post(self, request, *args, **kwargs):\n form = self.get_form()\n if form.validate():\n return self.form_valid(form)\n else:\n return self.form_invalid(form)", "def _post(self, *args, **kwargs):\n return self._request('post', *args, **kwargs)", "def post(self, request, *args, **kwargs):\n form = self.get_form()\n if form.is_valid():\n self.use_template(self.render_template())\n return self.form_valid(form)\n else:\n return self.form_invalid(form)", "def post(self) :\n self.redirect('/admin')", "def get_post_redirect(request, get_callback, post_callback,\n get_template_name = None, post_template_name = None):\n if request.method == \"GET\": \n if get_template_name: return get_callback(request, get_template_name)\n else: return get_callback(request)\n elif request.method == \"POST\": \n if post_template_name: return post_callback(request, post_template_name) \n else: return post_callback(request)\n raise Exception(\"Request method could not be matched. 
Expected a GET or a POST.\")", "def post(self, request, *args, **kwargs):\n form = self.get_form()\n if form.is_valid():\n return self.form_valid(form)\n else:\n self.form_invalid_init(form=form)\n self.form_invalid_add_global_errormessages(form=form)\n return self.form_invalid(form)", "def do_POST(self):\r\n self._send_handler_response('POST')", "def _handle_post_request(self):\n form = cgi.FieldStorage(\n fp=self.rfile,\n headers=self.headers,\n environ={'REQUEST_METHOD': 'POST'})\n\n if self.path == '/URLRequest':\n # First we check, whether the formular has been filled by\n # something behaving like a bot\n if form.has_key('URL'):\n self._send_homepage('<p class=\"warning\">Please check your input</p>')\n return\n else:\n url = form['real_URL'].value if form.has_key('real_URL') else None\n tmp = self._insert_url_to_db(url)\n if tmp:\n try:\n blocked = self._db.is_hash_blocked(tmp)\n if tmp < 0:\n self._send_database_problem()\n return\n elif blocked:\n self._send_blocked_page(blocked[3])\n return\n else:\n self._send_return_page(tmp)\n return\n except YuDatabaseError:\n self._send_database_problem()\n return\n else:\n # There was a general issue with URL\n self._send_homepage('''<p class=\"warning\">Please check your input.</p>''')\n return\n elif self.path == '/ContactUs':\n if form.has_key('URL'):\n # Here we might have a bot who likes to send the webmaster some spam\n # who most likely will be not amused about.\n template_filename = self._get_config_template('contactUsResult')\n text = read_template(\n template_filename,\n title='',\n header='Mail NOT sent',\n msg='There was an issue with your request. Are you a bot? '\n '<a href=\"/ContactUs\">Please try again</a>.')\n else:\n try:\n email = form['email'].value\n subj = form['subject'].value\n descr = form['request'].value\n if self._send_mail(subj, descr, email):\n template_filename = self._get_config_template('contactUsResult')\n text = read_template(\n template_filename,\n title='',\n header='Mail sent',\n msg=\"Your request has been sent. You will receive an answer soon.\")\n else:\n self._send_internal_server_error()\n return\n except KeyError:\n template_filename = self._get_config_template('contactUsResult')\n text = read_template(\n template_filename,\n title='',\n header='Mail NOT sent',\n msg='It appers you did not fill out all needed fields.\\\n <a href=\"/ContactUs\">Please try again</a>.')\n\n elif self.path == '/Show':\n short_url = form['ShortURL'].value if form.has_key('ShortURL') else None\n if short_url != None and short_url.find(\"yaturl.net\") > -1:\n tmp = short_url.rfind(\"/\")\n if tmp > -1 and short_url != \"\":\n tmp = tmp + 1\n short_url = short_url[tmp:]\n if short_url != None and short_url.isalnum():\n try:\n result = self._db.get_link_from_db(short_url)\n except YuDatabaseError:\n self._send_database_problem()\n return\n template_filename = self._get_config_template('showpage')\n if result:\n new_url = '<p><a href=\"%(result)s\">%(result)s</a></p>' % \\\n {'result': result}\n else:\n new_url = '<p class=\"warning\">No URL found for this string. 
Please double check your\\\n <a href=\"/ShowURL\">input and try again</a></p>'\n\n stats = self._db.get_statistics_for_hash(short_url)\n\n text = read_template(\n template_filename,\n title=SERVER_NAME,\n header=SERVER_NAME,\n msg=new_url,\n stat=stats,\n statspage=\"/stats/\" + short_url)\n else:\n self._send_404()\n return\n\n else:\n self._send_404()\n return\n\n self._send_response(text, 200)", "def post(self, request, *args, **kwargs):\n self.object = None\n form_class = self.get_form_class()\n form = self.get_form(form_class)\n if form.is_valid():\n return self.form_valid(form, request)\n else:\n return self.form_invalid(form, request)", "def post():\n\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(title=form.title.data,\n date=form.date.data,\n time_spent=form.time_spent.data,\n details=form.details.data,\n remember=form.remember.data)\n return redirect(url_for('index'))\n return render_template('new.html', form=form)", "def post(self):\n cont = self.request_string('continue', default=\"/\")\n self.redirect(users.create_login_url(cont))", "def handle_post(cls, **kwargs):\n raise NotImplementedError", "def get_success_url(self):\n return reverse('post-detail', kwargs={'pk': self.kwargs['pk'],})", "def post(self, request, *args, **kwargs):\n if \"add_post\" in request.POST:\n form = FlashbackPostForm(request.POST)\n if form.is_valid():\n # write journal\n form.add_post(self.get_object(), self.poster)\n else:\n raise Http404(form.errors)\n return HttpResponseRedirect(\n reverse(\n \"character:flashback_post\",\n kwargs={\n \"object_id\": self.character.id,\n \"flashback_id\": self.get_object().id,\n },\n )\n )", "def post(self):\n self.get_or_post(method='POST')", "def get_success_url(self):\n return reverse('post-detail', kwargs={'pk': self.kwargs['pk'], })", "def post(self, *args, **kwargs):\n return self._hit(\"POST\", *args, **kwargs)", "def input_post(): #TODO, error handling for privacy checks\n\n message = request.form['message']\n page_token = session['page']['access_token']\n resp = utils.post_message(message, page_token, session['visibility'])\n return render_template('success.html', post_id = resp['id'])", "def post(self):\n subject = self.request.get('subject')\n post_content = self.request.get('post_content')\n submit = self.request.get('submit')\n cancel = self.request.get('cancel')\n user = self.get_active_user()\n created_by = int(user.key().id())\n post_id = self.request.get('post_id')\n\n if not user:\n self.redirect('/login')\n if post_id:\n post = Posts.get_by_id(int(post_id))\n else:\n post = None\n\n if cancel == \"cancel\":\n self.redirect('/%s' % str(post.key().id()))\n return\n if (post and post.submitter_id == user.key().id()) or not post:\n if submit == \"submit\" and subject and post_content:\n if post:\n post.subject = subject\n post.content = post_content\n post.put()\n else:\n post = Posts(subject=subject,\n content=post_content,\n submitter_id=created_by)\n post.put()\n self.redirect('/%s' % str(post.key().id()))\n else:\n self.render_newpage(user=user,\n subject=subject,\n post_content=post_content,\n error=\"\"\"Please provide both a subject and a\n post!\"\"\")\n else:\n self.redirect('/login')", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass" ]
[ "0.6832064", "0.67205185", "0.64404494", "0.6379828", "0.63197786", "0.63160264", "0.62764686", "0.62760127", "0.6269952", "0.62224627", "0.6205702", "0.61954165", "0.6178908", "0.6143098", "0.61342144", "0.6116767", "0.6080857", "0.6053228", "0.60354316", "0.60266346", "0.6014931", "0.60108656", "0.60084397", "0.6005442", "0.59993345", "0.5977364", "0.59719557", "0.59673053", "0.59673053", "0.59673053" ]
0.72291327
0
Flying wing with winglets.
def flying_wing(sections): _wing = wl.FlyingWing(sections=sections, winglet_parameters=None) _wing.create_wing_planform() return _wing
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wing(self):\n return", "def flying_wing_winglets(sections):\n\n SPAN = WingletParameters.SPAN.value\n ANGLE_CANT = WingletParameters.ANGLE_CANT.value\n ANGLE_SWEEP = WingletParameters.ANGLE_SWEEP.value\n ANGLE_TWIST_ROOT = WingletParameters.ANGLE_TWIST_ROOT.value\n ANGLE_TWIST_TIP = WingletParameters.ANGLE_TWIST_TIP.value\n TAPER_RATIO = WingletParameters.TAPER_RATIO.value\n CHORD_ROOT = WingletParameters.CHORD_ROOT.value\n W_AIRFOIL = WingletParameters.AIRFOIL.value\n\n winglet_parameters = {\n SPAN: 0.05,\n TAPER_RATIO: 0.32,\n CHORD_ROOT: 0.65,\n ANGLE_SWEEP: 38,\n ANGLE_CANT: 45,\n ANGLE_TWIST_ROOT: 0.0,\n ANGLE_TWIST_TIP: 0.0,\n W_AIRFOIL: \"naca0012\",\n }\n\n _wing = wl.FlyingWing(sections=sections, winglet_parameters=winglet_parameters)\n\n _wing.create_wing_planform()\n _wing.create_winglet()\n\n return _wing", "def weight_update(u_ff, u_wc, alpha, beta, w, fan_all):\r\n mult_wc = np.matmul(np.reshape(hard_sigmoid_array(u_wc), (fan_all, 1)),\r\n np.reshape(hard_sigmoid_array(u_wc), (1, fan_all)))\r\n mult_ff = np.matmul(np.reshape(hard_sigmoid_array(u_ff), (fan_all, 1)),\r\n np.reshape(hard_sigmoid_array(u_ff), (1, fan_all)))\r\n delta_w = alpha * (1 / beta) * (mult_wc - mult_ff)\r\n delta_w[np.diag_indices(fan_all)] = 0\r\n w = w + delta_w\r\n return w", "def ReceiveSpike(self, w):\n self.weighted_incoming_spikes += w", "async def wink(self, ctx):\n await ctx.send('wonk')", "def _action(self, wloops: Any, beta: Any) -> Any:\n pass", "def createwel(self,mf):\n wel_sp=self.getDictWells()\n wel = flopy.modflow.ModflowWel(mf, stress_period_data=wel_sp)\n return wel", "def update(w, g, alpha = 0.03):\n return w + alpha * g", "def update_weights(self):\n activation, activation_p = self.feedforward()\n # initialize delta_weights\n delta_w = np.zeros(2)\n # simultaneous calculate delta_weights\n for i, element in enumerate(self.y):\n delta_w += (activation[i]-element)*(activation_p[i])*self.X[i]\n # update weight\n self.weights -= self.alfa*delta_w", "def render(self, wfe=True):\n # optimization (or not):\n # actuators is small, say 40x40\n # while poke_arr is ~= 10x the resolution (400x400)\n #\n # it is most optimal to set the values of poke_arr based on the mask\n # however, for such small arrays it makes little difference and the\n # code appears much less expressive\n # what is here is ~99.1% of the speed with better legibility\n\n # potential \"bug\" - it is assumed the content of actuators_work\n # where the actuators are masked off is zero, or whatever the desired\n # sticking value is. 
If the expected behavior for masked actuators\n # changes over the life of this instance, the user may be surprised\n # OTOH, it may be a \"feature\" that stuck actuators, etc, may be\n # adjusted in this way rather elegantly\n self.poke_arr[self.iyy, self.ixx] = self.actuators\n\n # self.dx is unused inside apply tf, but :shrug:\n sfe = apply_transfer_functions(self.poke_arr, None, self.tf, shift=False)\n if self.needs_rot:\n warped = warp(sfe, self.projx, self.projy)\n else:\n warped = sfe\n if wfe:\n warped *= (2*self.obliquity)\n\n if self.upsample != 1:\n warped = fourier_resample(warped, self.upsample)\n\n self.Nintermediate = warped.shape\n\n if warped.shape[0] < self.Nout[0]:\n # need to pad\n warped = pad2d(warped, out_shape=self.Nout)\n elif warped.shape[0] > self.Nout[1]:\n warped = crop_center(warped, out_shape=self.Nout)\n\n return warped", "def event2544():\n header(2544)\n\n if_player_has_special_effect(0, SPEFFECT.TwilightRingEquipped)\n chr.set_special_effect(CHR.Player, SPEFFECT.TwilightRingWeak)\n\n if_player_does_not_have_special_effect(0, SPEFFECT.TwilightRingEquipped)\n chr.cancel_special_effect(CHR.Player, SPEFFECT.TwilightRingWeak)\n chr.cancel_special_effect(CHR.Player, SPEFFECT.TwilightRingMedium)\n chr.cancel_special_effect(CHR.Player, SPEFFECT.TwilightRingStrong)\n\n restart()", "def set_wdiff(self):\n try:\n self.wdiff=self.mdiff*self.ws.coef[1]\n except:\n self.wdiff=self.mdiff", "def _derW(self, w, x, y, z):\n raise NotImplementedError()", "def ApplyWeights(frame):\n if \"Wpol\" not in frame and \"Wunpol\" not in frame:\n return\n\n if frame[\"T\"].weighted:\n return frame\n ValidateMaps(frame)\n\n tmap = frame.pop(\"T\")\n\n if \"Wpol\" in frame:\n wmap = frame[\"Wpol\"]\n qmap = frame.pop(\"Q\")\n umap = frame.pop(\"U\")\n maps.apply_weights(tmap, qmap, umap, wmap)\n else:\n wmap = frame[\"Wunpol\"]\n maps.apply_weights_t(tmap, wmap)\n\n frame[\"T\"] = tmap\n if \"Wpol\" in frame:\n frame[\"Q\"] = qmap\n frame[\"U\"] = umap\n\n return frame", "def update_w(self, w):\n # Need to update the scaled weights\n if self.scale_weights:\n self._scale_weights_to_degree(w)\n self._generate_weighted_adj_matrices()\n # once we get new DW matrices, multiply by weights\n super().update_w(w)\n self._degree_weight_weighted_matrices()", "def falcon():", "def update_weights(a_plus, a_minus, tau_plus, tau_minus, X, Y, pre_post_trace, post_pre_trace, trace, tau_e): \n # pre trace without spikes - for coincident spikes \n conv_pre_old, _ = convolution2(pre_post_trace, tau_plus, a_plus, 0) \n # post trace without spikes - for coincident spikes \n conv_post_old, _ = convolution2(post_pre_trace, tau_minus, a_minus, 0)\n \n # presynaptic neuron trace \n conv_pre_scaled, pre_post_trace = convolution2(pre_post_trace, tau_plus, a_plus, X)\n # postynaptic neuron trace \n conv_post_scaled, post_pre_trace = convolution2(post_pre_trace, tau_minus, a_minus, Y)\n \n # total synaptic change due to STDP \n W = (conv_pre_scaled*Y + conv_post_scaled*X)* ~(X&Y) + \\\n ((conv_pre_old*Y + conv_post_old*X)+(a_plus + a_minus)/2)*(X&Y)\n \n ## weight change is convoluted with eligibility trace \n eligibility_trace, trace = convolution2(trace, tau_e, 1, W)\n \n return pre_post_trace, post_pre_trace, eligibility_trace, trace, W", "def wrench_stamped_cb(self, ws):\n force_vec = np.array([ws.wrench.force.x, ws.wrench.force.y, ws.wrench.force.z])\n scaled_vec = np.multiply(force_vec, self.scaling)\n mag = np.linalg.norm(force_vec)\n normalized_vec = np.divide(force_vec,mag)\n \n ta = TaxelArray()\n 
ta.header.frame_id = '/l_netft_frame' #self.ft_link_name\n ta.header.stamp = rospy.Time.now()\n ta.sensor_type = 'force'\n ta.link_names = ['wrist_roll']\n ta.centers_x = [0.]\n ta.centers_y = [0.]\n ta.centers_z = [0.]\n ta.normals_x = [-normalized_vec[0]]\n ta.normals_y = [-normalized_vec[1]]\n ta.normals_z = [-normalized_vec[2]]\n ta.values_x = [-scaled_vec[0]]\n ta.values_y = [-scaled_vec[1]]\n ta.values_z = [-scaled_vec[2]]\n \n self.taxel_array_pub.publish(ta)\n\n m3ta = TaxelArray()\n m3ta.header.frame_id = '/l_netft_frame'\n m3ta.header.stamp = rospy.Time.now()\n m3ta.sensor_type = 'force'\n m3ta.link_names = ['wrist_roll']\n m3ta.centers_x = [0.]\n m3ta.centers_y = [0.]\n m3ta.centers_z = [0.]\n m3ta.normals_x = [normalized_vec[0]]\n m3ta.normals_y = [normalized_vec[1]]\n m3ta.normals_z = [normalized_vec[2]]\n m3ta.values_x = [scaled_vec[0]]\n m3ta.values_y = [scaled_vec[1]]\n m3ta.values_z = [scaled_vec[2]]\n \n self.m3_taxel_array_pub.publish(m3ta)", "def update_weights(self):\n\n\n self.w += self.learn_rate * (self.X.T.dot(self.T - self.Y)\n - self.reg_L1 * np.sign(self.w)\n - self.reg_L2 * 2*self.w)", "def half_space_cooling_waermefluss(k, T0, T1, kappa, t):\n return k * (T1 - T0) / (numpy.sqrt(math.pi * kappa * t))", "def make_bwfull(w,minZ,maxZ,ires=1,fixw=False,m=mz0):\n cmds = []\n # coefficients for the amplitudes\n cmds.append(\"A[1,0,1000000]\")\n cmds.append(\"B[1,0,1000000]\")\n cmds.append(\"C[10000.0,0,1000000]\")\n # amplitudes\n cmds.append('m[%s,%s,%s]'%(m,minZ,maxZ))\n cmds.append('g[2.49,0,10]')\n denom = '((x^2-m^2)^2+x^4*g^2/m^2)'\n cmds.append(\"expr::z_rbw('x^2/%s',x,m,g)\"%denom)\n cmds.append(\"expr::z_int('(x^2-m^2)/%s',x,m,g)\"%denom)\n cmds.append(\"expr::z_rad('1/(x^2)',x)\")\n # resolution model\n cmds += resolutions[ires]()\n [w.factory(cmd) for cmd in cmds]\n # any parameter adjustments\n if True:\n w.var('r_m').setConstant(kTRUE) if w.var('r_m') else None\n w.var('rt_m').setConstant(kTRUE) if w.var('rt_m') else None\n w.var('g').setConstant(kTRUE) if w.var('g') and fixw else None\n # sum-of-amplitudes pdf\n lshape = RooRealSumPdf('lshape','lshape',RooArgList(w.function('z_rad'),w.function('z_int'),w.function('z_rbw')),RooArgList(w.var('A'),w.var('B'),w.var('C')))\n getattr(w,'import')(lshape)\n # convolution\n pdf = w.pdf('lshape')\n if w.pdf('res'):\n w.var('x').setBins(10000,'cache')\n cmd = 'FCONV::sum(x,lshape,res)'\n w.factory(cmd)\n pdf = w.pdf('sum')\n return pdf, kFALSE", "def update_weights_negative(self):\n eta = self.config.eta\n self.w_xh -= eta * (self.x.T @ self.h)\n self.w_th -= eta * (self.t.T @ self.h)\n self.w_ho -= eta * (self.h.T @ self.o) \n self.w_hz -= eta * (self.h.T @ self.z)", "def update_weight(wij, yj, tj, xi, lr = 0.25):\n\n new_wij = wij - lr * ((yj - tj) * xi)\n new_wij = round(new_wij, 3)\n #print(\"\\t\", wij, \"-\", lr, \"* (\", yj, \"-\", tj, \") *\", xi, \"=\", new_wij)\n\n return new_wij", "def apply_weights(self):\n w0_array = np.ones(self.N)*self.w0\n return w0_array + self.X.dot(self.w)", "def __call__(self, X, W):\n\t\treturn", "def update_recurrent_weights_step(self):\n \n # update weights: hebbian term\n self.delta_Wee=self.learn_rate*(self.rr[0:self.N_e]-self.input_mean)*\\\n (self.rr[0:self.N_e].T-self.input_mean)\n \n self.W_ee+=self.dt*self.delta_Wee\n\n # update weights: normalize to fixed mean of incoming and outgoing weights\n self.W_ee-=(self.W_ee.mean(axis=1)-self.W_av_star)[:,np.newaxis]\n self.W_ee-=(self.W_ee.mean(axis=0)-self.W_av_star)[np.newaxis,:]\n \n # clip weights \n 
self.W_ee=np.clip(self.W_ee,0,self.W_max_ee)\n \n # update excitatory weights in the big weight matrix\n self.W[:self.N_e,:self.N_e]=self.W_ee", "def wadlord(update, context):\n\n msg = update.message.text\n punctuation = r\"\"\"!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~\"\"\"\n msg = ''.join(c for c in msg if c not in punctuation) # Strip punctuation from message\n msg = msg.split()\n if len(msg) == 2:\n if r.choices([0, 1], weights=[0.8, 0.2], k=1)[0]: # (Returns list with one element)\n print('wad lord in', update.effective_chat.title, f'({update.effective_chat.type})')\n waddened = msg[1][0] + msg[0][1:] + ' ' + msg[0][0] + msg[1][1:] # the words exchange their first letters\n lad_bot.send_message(chat_id=update.effective_chat.id, reply_to_message_id=update.message.message_id, text=waddened)", "def update_weights_positive(self):\n eta = self.config.eta\n self.w_xh += eta * (self.x.T @ self.h)\n self.w_th += eta * (self.t.T @ self.h)\n self.w_ho += eta * (self.h.T @ self.o)\n self.w_hz += eta * (self.h.T @ self.z)", "def apply_dw(self, dw):\n\n # list of trainable params\n param_names = [\"w_out\", \"b_out\", \"w_link\", \"w_in\", \"b_in\"]\n\n for param_name in param_names:\n self.__dict__[param_name] = self.__getattribute__(\n param_name) - LEARNING_RATE*dw[\"d\" + param_name]", "def run(self):\n self.coffee_machine.water_tank.decrease_weight(self.coffee_machine.chosen_coffee_data.get('water_weight'))" ]
[ "0.66285217", "0.6478682", "0.6104694", "0.5966311", "0.5959964", "0.5788519", "0.57147986", "0.56718796", "0.56633484", "0.5654964", "0.5629386", "0.5615403", "0.5604028", "0.5589418", "0.5578025", "0.5567017", "0.55469376", "0.55456835", "0.5537543", "0.54950386", "0.5493774", "0.5490482", "0.5485934", "0.54833", "0.5482991", "0.54664785", "0.54597783", "0.54384166", "0.54357857", "0.5434656" ]
0.6692536
0
Test the sections are sorted in the correct order.
def test_instantiate_wing_sort_sections(sections):
    expected_sections = copy.deepcopy(sections)
    # Swap sections order
    swap = copy.deepcopy(sections[1])
    sections[1] = sections[0]
    sections[0] = swap
    # Instantiate
    wing = FlyingWing(sections=sections, winglet_parameters=None)
    # Check the sections are in the right order again
    result_sections = wing.sections
    assert expected_sections == result_sections
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_order(self, filename: str, section: str, texts: List[str]):\n alphas = sorted(texts, key=lambda x: x.split(':')[0].lower())\n if texts == alphas:\n return\n for text, alpha in zip(texts, alphas):\n if text != alpha:\n print(f'{filename}: {section}: {text} vs {alpha}')\n break", "def test_section_iterator(self):\n ars = self.ar[2009][11]['general']\n self.assertEqual(list(ars), self.__class__.wanted_lines)", "def test_section_keys(self):\n ars = self.ar[2009][11]['general']\n self.assertEqual(ars.keys(), self.__class__.wanted_lines)", "def sort(self):\n for section, section_items in self.items():\n if sorted(section_items) == list(section_items):\n continue\n\n section_dict = {k: v for k, v in section_items.items()}\n\n for k in list(section_items):\n self.remove_option(section, k)\n\n for k, v in sorted(section_dict.items()):\n self.set(section, k, v)", "def testSortedNotes(self):\n for simple_score in self.simple_scores.values():\n notes = simple_score.sorted_notes\n assert all(notes[i].start_time <= notes[i + 1].start_time\n for i in range(len(notes) - 1))", "def test_section_is_correct(self):\n res = self.client().post('/api/v1/sections/', headers={'Content-Type': 'application/json'},\n data=json.dumps(self.section))\n get_res = self.client().get('api/v1/sections/', headers={'Content-Type': 'application/json'})\n records = json.loads(get_res.data)\n self.assertEqual(records[0]['title'], 'Test Title')", "def testSectionCount(self):\n\n self.sectionCount(3640)", "def test_sorted_page_stream(self):\n self._test_insertion(Macros, 0)", "def test_sort_lines(self):\n before_b = \"\"\"\\\n first line\n line 1\n line a\n line b\n line c\n last line\n \"\"\"\n after_b = \"\"\"\\\n first line\n line b\n line a\n line 1\n line c\n last line\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"2.0\", \"5.6\"),\n after_sel=(\"2.0\", \"5.6\"),\n command_name=\"sort-lines\",\n )", "def test04_vms_page_table_sorting(self):\n self.lg('%s STARTED' % self._testID)\n self.lg('sorting of all fields of vms table, should be working as expected')\n self.assertTrue(self.Tables.check_sorting_table('machines'))\n self.lg('%s ENDED' % self._testID)", "def test_sections_flat_spider(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. 
For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n layout = SectionLayout.objects.get(sectionlayouttranslation__name=\"Side by Side\")\n section1 = create_section(title=\"Background and context\",\n story=story,\n layout=layout,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story, layout=layout)\n section3 = create_section(title=\"Who has been involved\", \n story=story, layout=layout)\n section4 = create_section(title=\"Next steps\", story=story, layout=layout)\n SectionRelation.objects.create(parent=section1, child=section2,\n weight=0)\n SectionRelation.objects.create(parent=section1, child=section3,\n weight=1)\n SectionRelation.objects.create(parent=section1, child=section4,\n weight=2)\n self.assertEqual(story.structure.sections_flat, [section1, section2,\n section3, section4])", "def test_get_sections_json(self):\n json_data = self.view.get_sections_json(story=self.story)\n data = json.loads(json_data)\n self.assertEqual(len(data['objects']), len(self.story.sections.all()))\n section_ids = [section_data['section_id'] for section_data in data['objects']]\n for section in self.story.sections.all():\n self.assertIn(section.section_id, section_ids)", "def test_sections_flat_tree(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n layout = SectionLayout.objects.get(sectionlayouttranslation__name=\"Side by Side\")\n section1 = create_section(title=\"Background and context\",\n story=story,\n layout=layout,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story, layout=layout)\n section3 = create_section(title=\"Who has been involved\", \n story=story, layout=layout)\n section4 = create_section(title=\"Next steps\", story=story, layout=layout)\n section5 = create_section(title=\"Last section\", story=story, layout=layout)\n SectionRelation.objects.create(parent=section1, child=section2,\n weight=0)\n SectionRelation.objects.create(parent=section2, child=section3,\n weight=0)\n SectionRelation.objects.create(parent=section2, child=section4,\n weight=1)\n SectionRelation.objects.create(parent=section1, child=section5,\n weight=1)\n self.assertEqual(story.structure.sections_flat, [section1, section2,\n section3, section4,\n section5])", "def test_sections_flat_one_section(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. 
For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n layout = SectionLayout.objects.get(sectionlayouttranslation__name=\"Side by Side\")\n section1 = create_section(title=\"Background and context\",\n story=story,\n layout=layout,\n root=True)\n self.assertEqual(story.structure.sections_flat, [section1])", "def test_sections_flat_linear_nested(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n layout = SectionLayout.objects.get(sectionlayouttranslation__name=\"Side by Side\")\n section1 = create_section(title=\"Background and context\",\n story=story,\n layout=layout,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story, layout=layout)\n section3 = create_section(title=\"Who has been involved\", \n story=story, layout=layout)\n section4 = create_section(title=\"Next steps\", story=story,\n layout=layout)\n SectionRelation.objects.create(parent=section1, child=section2)\n SectionRelation.objects.create(parent=section2, child=section3)\n SectionRelation.objects.create(parent=section3, child=section4)\n self.assertEqual(story.structure.sections_flat, [section1, section2, \n section3, section4])", "def test_sections_flat_spider(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n section1 = create_section(title=\"Background and context\",\n story=story,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story)\n section3 = create_section(title=\"Who has been involved\", \n story=story)\n section4 = create_section(title=\"Next steps\", story=story)\n SectionRelation.objects.create(parent=section1, child=section2,\n weight=0)\n SectionRelation.objects.create(parent=section1, child=section3,\n weight=1)\n SectionRelation.objects.create(parent=section1, child=section4,\n weight=2)\n self.assertEqual(story.structure.sections_flat, [section1, section2,\n section3, section4])", "def collate_sections(self,paper_text,section_list:List[Section],split_upto=0.2,split_bins=10):\n current_text_split = []\n prev_section = None\n curr_text = str(paper_text)\n unfound_sections = []\n some_section_not_found = False\n for index,s in enumerate(section_list):\n curr_text,section_status = self.split_and_find_section(curr_text,s.name,prev_section,split_upto=split_upto,split_bins=split_bins)\n if not section_status: # If couldn't match section add it here. 
\n some_section_not_found = True\n # print('\\n\\t'+s.name) \n prev_section = s \n for ss in s.subsections:\n curr_text,section_status = self.split_and_find_section(curr_text,ss.name,prev_section,split_upto=split_upto,split_bins=split_bins)\n if not section_status:\n some_section_not_found = True\n # print(\"Cannot Match For :\",ss.name)\n prev_section = ss\n # print('\\n\\t\\t'+ss.name)\n if index == len(section_list)-1:\n s.text = curr_text\n return section_list,some_section_not_found", "def test_headlines_order(self) -> None:\n last: Tuple[int, str] = (0, \"\")\n\n for headline in self.report.headlines:\n rule: Optional[HeadlineRules] = self.rules.get_headline_rules(headline.name)\n if (not rule) or (rule.order is None):\n continue\n\n last_order, last_headline = last # type: int, str\n if last_order > rule.order:\n self.add_error(\n (\n f\"Rubriken {headline.name} ska komma före \"\n f\"rubriken {last_headline}.\"\n ),\n headline=headline,\n )\n\n last = (rule.order, headline.name)", "def test_sections_flat_tree(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n section1 = create_section(title=\"Background and context\",\n story=story,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story)\n section3 = create_section(title=\"Who has been involved\", \n story=story)\n section4 = create_section(title=\"Next steps\", story=story)\n section5 = create_section(title=\"Last section\", story=story)\n SectionRelation.objects.create(parent=section1, child=section2,\n weight=0)\n SectionRelation.objects.create(parent=section2, child=section3,\n weight=0)\n SectionRelation.objects.create(parent=section2, child=section4,\n weight=1)\n SectionRelation.objects.create(parent=section1, child=section5,\n weight=1)\n self.assertEqual(story.structure.sections_flat, [section1, section2,\n section3, section4,\n section5])", "def test_list_sections(self):\n response = self.client.open(\n '/pablokvitca/classdeck-api/1.0.0/section',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_sections_flat_one_section(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. 
For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n section1 = create_section(title=\"Background and context\",\n story=story,\n root=True)\n self.assertEqual(story.structure.sections_flat, [section1])", "def test_sections_flat_linear_nested(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n section1 = create_section(title=\"Background and context\",\n story=story,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story)\n section3 = create_section(title=\"Who has been involved\", \n story=story)\n section4 = create_section(title=\"Next steps\", story=story)\n SectionRelation.objects.create(parent=section1, child=section2)\n SectionRelation.objects.create(parent=section2, child=section3)\n SectionRelation.objects.create(parent=section3, child=section4)\n self.assertEqual(story.structure.sections_flat, [section1, section2, \n section3, section4])", "def test_section_isolation(self):\r\n params = ParameterSet.read_param_file(exepath('mocks/line_tests.txt'), 'Jitter Tolerance')\r\n\r\n self.assertEqual(len(params.keys()), 4)\r\n\r\n self.assertEqual(params['param1'], (1,2,3))\r\n self.assertEqual(params['param2'], (4,5,6))\r\n self.assertEqual(params['param3'], (7,8,9))\r\n \r\n self.assertEqual(params['ml_param'], [[1,2,3],['a', 'b', 'c'],{'C':1,'D':'Two'}])\r\n \r\n # Ensure others are not in collection\r\n keys = params.keys()\r\n self.assertFalse('my param' in keys)\r\n self.assertFalse('my_param' in keys)\r\n self.assertFalse('MYPARAM' in keys)\r\n self.assertFalse('My param' in keys) \r\n self.assertFalse('hidden_param_2' in keys)\r\n self.assertFalse('hidden_param' in keys)", "def testSorting(self):\n if self.sorting in tools.SORTINGS:\n self.assertEqual(\n self.sorting,\n self.config.sorting\n )\n else:\n self.assertNotEqual(\n self.sorting,\n self.config.sorting\n )\n self.assertEqual(\n tools.SORTING_DEFAULT,\n self.config.sorting\n )", "def multiple_sections(): # noqa: D416", "def check_sort(self):\n if self.list == []:\n return True\n seg_iter = iter(self.list)\n last = next(seg_iter)\n for segment in seg_iter:\n if last > segment:\n raise Exception('non trié')\n last = segment\n return True", "def test_get_next_section_tree(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. 
For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n layout = SectionLayout.objects.get(sectionlayouttranslation__name=\"Side by Side\")\n section1 = create_section(title=\"Background and context\",\n story=story,\n layout=layout,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story, layout=layout)\n section3 = create_section(title=\"Who has been involved\", \n story=story, layout=layout)\n section4 = create_section(title=\"Next steps\", story=story, layout=layout)\n section5 = create_section(title=\"Last section\", story=story, layout=layout)\n SectionRelation.objects.create(parent=section1, child=section2,\n weight=0)\n SectionRelation.objects.create(parent=section2, child=section3,\n weight=0)\n SectionRelation.objects.create(parent=section2, child=section4,\n weight=1)\n SectionRelation.objects.create(parent=section1, child=section5,\n weight=1)\n self.assertEqual(story.structure.get_next_section(section1), \n section2)\n self.assertEqual(story.structure.get_next_section(section2), \n section3)\n self.assertEqual(story.structure.get_next_section(section3), \n section4)\n self.assertEqual(story.structure.get_next_section(section4), \n section5)\n self.assertEqual(story.structure.get_next_section(section5), \n None)", "def test_categories_are_sorted(self):\n self.data_sorted(self.test_data['shirts'], self.test_data['pants'])", "def test_get_next_section_linear_nested(self):\n title = (\"Neighborhood Outreach for I-70 Alignment Impacting \"\n \"Elyria, Globeville and Swansea\")\n summary = \"\"\"\n The City of Denver and Colorado Department of Transportation \n (CDOT) are working together to do neighborhood outreach\n regarding the I-70 alignment between Brighton Boulevard and\n Colorado. 
For detailed information on the neighborhood outreach\n efforts please visit www.DenverGov.org/ccdI70.\n \"\"\"\n byline = \"Denver Public Works and CDOT\"\n story = create_story(title=title, summary=summary, byline=byline)\n layout = SectionLayout.objects.get(sectionlayouttranslation__name=\"Side by Side\")\n section1 = create_section(title=\"Background and context\",\n story=story, layout=layout,\n root=True)\n section2 = create_section(title=\"Decisions to be made\", story=story, layout=layout)\n section3 = create_section(title=\"Who has been involved\", \n story=story, layout=layout)\n section4 = create_section(title=\"Next steps\", story=story, layout=layout)\n SectionRelation.objects.create(parent=section1, child=section2)\n SectionRelation.objects.create(parent=section2, child=section3)\n SectionRelation.objects.create(parent=section3, child=section4)\n self.assertEqual(story.structure.get_next_section(section1), \n section2)\n self.assertEqual(story.structure.get_next_section(section2),\n section3)\n self.assertEqual(story.structure.get_next_section(section3),\n section4)\n self.assertEqual(story.structure.get_next_section(section4),\n None)", "def test_ordering(self):\r\n def verify_order(source_usage_key, parent_usage_key, source_position=None):\r\n usage_key = self._duplicate_item(parent_usage_key, source_usage_key)\r\n parent = self.get_item_from_modulestore(parent_usage_key)\r\n children = parent.children\r\n if source_position is None:\r\n self.assertFalse(source_usage_key in children, 'source item not expected in children array')\r\n self.assertEqual(\r\n children[len(children) - 1],\r\n usage_key,\r\n \"duplicated item not at end\"\r\n )\r\n else:\r\n self.assertEqual(\r\n children[source_position],\r\n source_usage_key,\r\n \"source item at wrong position\"\r\n )\r\n self.assertEqual(\r\n children[source_position + 1],\r\n usage_key,\r\n \"duplicated item not ordered after source item\"\r\n )\r\n\r\n verify_order(self.problem_usage_key, self.seq_usage_key, 0)\r\n # 2 because duplicate of problem should be located before.\r\n verify_order(self.html_usage_key, self.seq_usage_key, 2)\r\n verify_order(self.seq_usage_key, self.chapter_usage_key, 0)\r\n\r\n # Test duplicating something into a location that is not the parent of the original item.\r\n # Duplicated item should appear at the end.\r\n verify_order(self.html_usage_key, self.usage_key)" ]
[ "0.6678625", "0.65117383", "0.64006776", "0.63972", "0.6224504", "0.6207259", "0.6120042", "0.6104729", "0.5975373", "0.5969546", "0.5953876", "0.59147906", "0.590727", "0.5897399", "0.5894756", "0.58860564", "0.5871923", "0.5813467", "0.580579", "0.58002204", "0.5794069", "0.5787316", "0.57821727", "0.57774264", "0.57737494", "0.57716304", "0.5746302", "0.57402223", "0.57294726", "0.5698403" ]
0.6723676
0
Prepare the file locker. Specify the file to lock and optionally the maximum timeout and the delay between each attempt to lock.
def __init__(self, file_name, timeout=10, delay=.05):
    self.is_locked = False
    self.lockfile = os.path.abspath(file_name)
    self.file_name = file_name
    self.timeout = timeout
    self.delay = delay
    self.fd = None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, file_name, timeout=10, delay=.05):\r\n self.is_locked = False\r\n #self.lockfile = os.path.join(os.getcwd(), \"%s.lock\" % file_name)\r\n self.lockfile = file_name + '.lock'\r\n self.file_name = file_name\r\n self.timeout = timeout\r\n self.delay = delay", "def __init__(self, protected_file_path, timeout=None, delay=1, lock_file_contents=None):\n self.is_locked = False\n self.lockfile = protected_file_path + \".lock\"\n self.timeout = timeout\n self.delay = delay\n self._lock_file_contents = lock_file_contents\n if self._lock_file_contents is None:\n self._lock_file_contents = \"Owning process args:\\n\"\n for arg in sys.argv:\n self._lock_file_contents += arg + \"\\n\"", "def __init__(self, file_name, timeout=10, delay=0.05):\n self.file_name = os.path.abspath(file_name)\n self.lockfile = os.path.abspath(file_name) + \".lock\"\n self.timeout = float(timeout)\n self.delay = float(delay)\n self.is_locked = False\n\n if self.delay > self.timeout or self.delay <= 0 or self.timeout <= 0:\n raise ValueError(\"delay and timeout must be positive with delay \" \"<= timeout\")", "def __init__(self, dir, timeout=None):\n self.dir = dir\n if timeout is not None and timeout < 2.0:\n raise ValueError('timeout must be at least 2 seconds')\n self.timeout = timeout\n if self.fileName:\n self.lockDir = os.path.join(dir, self.fileName)\n self._makeDir()\n else:\n self.lockDir = dir \n self._locked = False", "def acquire(self):\n start_time = time.time()\n while True:\n try:\n self.fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)\n break\n except (OSError,) as e:\n if e.errno != errno.EEXIST:\n raise\n if (time.time() - start_time) >= self.timeout:\n raise FileLockException(f\"{self.lockfile}: Timeout occurred.\")\n time.sleep(self.delay)\n\n self.is_locked = True", "def acquire(self):\r\n start_time = time.time()\r\n import getpass\r\n userName = getpass.getuser()\r\n import platform\r\n computerName = platform.uname()[1]\r\n while True:\r\n try:\r\n self.fd = os.open(self.lockfile, os.O_CREAT|os.O_EXCL|os.O_RDWR)\r\n os.write(self.fd, userName + '\\n')\r\n os.write(self.fd, computerName + '\\n')\r\n os.write(self.fd, time.ctime(time.time()))\r\n break;\r\n except OSError as e:\r\n if e.errno != errno.EEXIST and e.errno != errno.EACCES:\r\n raise \r\n if (time.time() - start_time) >= self.timeout:\r\n if e.errno == errno.EEXIST:\r\n raise FileLockException(\"Timeout occured.\")\r\n else:\r\n raise FileLockException(\"Access denied.\")\r\n time.sleep(self.delay)\r\n self.is_locked = True", "def lock(self, timeout=0):\n if timeout:\n timeout_time = time.time() + timeout\n # Make sure my temp lockfile exists, and that its contents are\n # up-to-date (e.g. the temp file name, and the lock lifetime).\n self.__write()\n # TBD: This next call can fail with an EPERM. I have no idea why, but\n # I'm nervous about wrapping this in a try/except. It seems to be a\n # very rare occurence, only happens from cron, and (only?) on Solaris\n # 2.6.\n self.__touch()\n\n while True:\n # Create the hard link and test for exactly 2 links to the file\n try:\n os.link(self.__tmpfname, self.__lockfile)\n # If we got here, we know we know we got the lock, and never\n # had it before, so we're done. Just touch it again for the\n # fun of it.\n self.__touch()\n break\n except OSError, e:\n # The link failed for some reason, possibly because someone\n # else already has the lock (i.e. 
we got an EEXIST), or for\n # some other bizarre reason.\n if e.errno == errno.ENOENT:\n # TBD: in some Linux environments, it is possible to get\n # an ENOENT, which is truly strange, because this means\n # that self.__tmpfname doesn't exist at the time of the\n # os.link(), but self.__write() is supposed to guarantee\n # that this happens! I don't honestly know why this\n # happens, but for now we just say we didn't acquire the\n # lock, and try again next time.\n pass\n elif e.errno <> errno.EEXIST:\n # Something very bizarre happened. Clean up our state and\n # pass the error on up.\n os.unlink(self.__tmpfname)\n raise\n elif self.__linkcount() <> 2:\n # Somebody's messin' with us!\n pass\n elif self.__read() == self.__tmpfname:\n # It was us that already had the link.\n raise AlreadyLockedError\n # otherwise, someone else has the lock\n pass\n # We did not acquire the lock, because someone else already has\n # it. Have we timed out in our quest for the lock?\n if timeout and timeout_time < time.time():\n os.unlink(self.__tmpfname)\n raise TimeOutError\n # Okay, we haven't timed out, but we didn't get the lock. Let's\n # find if the lock lifetime has expired.\n if time.time() > self.__releasetime():\n # Yes, so break the lock.\n self.__break()\n # Okay, someone else has the lock, our claim hasn't timed out yet,\n # and the expected lock lifetime hasn't expired yet. So let's\n # wait a while for the owner of the lock to give it up.\n self.__sleep()", "def Lock(self, timeout_secs=_DEFAULT_TIMEOUT_SECS):\n if self._file_desc is not None:\n raise OSError(\"%s has been locked.\" % self._file_path)\n parent_dir = os.path.dirname(self._file_path)\n if not os.path.exists(parent_dir):\n os.makedirs(parent_dir)\n successful = False\n self._file_desc = os.open(self._file_path, os.O_CREAT | os.O_RDWR,\n 0o666)\n try:\n successful = self._Flock(timeout_secs)\n finally:\n if not successful:\n os.close(self._file_desc)\n self._file_desc = None\n return successful", "def acquire(lockfile, timeout=None):\n\tif timeout is None:\n\t\ttimeout = max_timeout # 100yrs should suffice\n\tretries = int(float(timeout)/wait_interval)\n\n\t_lock_acquire(lockfile, retries)\n\t\n\treturn lockfile", "def lock(tmp_dir, timeout=120, min_wait=5, max_wait=10, verbosity=1):\r\n # Create base of lock directory if required.\r\n base_lock = os.path.dirname(tmp_dir)\r\n if not os.path.isdir(base_lock):\r\n try:\r\n os.makedirs(base_lock)\r\n except OSError:\r\n # Someone else was probably trying to create it at the same time.\r\n # We wait two seconds just to make sure the following assert does\r\n # not fail on some NFS systems.\r\n time.sleep(2)\r\n assert os.path.isdir(base_lock)\r\n\r\n # Variable initialization.\r\n lock_file = os.path.join(tmp_dir, 'lock')\r\n random.seed()\r\n my_pid = os.getpid()\r\n no_display = (verbosity == 0)\r\n\r\n nb_error = 0\r\n # The number of time we sleep when their is no errors.\r\n # Used to don't display it the first time to display it less frequently.\r\n # And so don't get as much email about this!\r\n nb_wait = 0\r\n # Acquire lock.\r\n while True:\r\n try:\r\n last_owner = 'no_owner'\r\n time_start = time.time()\r\n other_dead = False\r\n while os.path.isdir(tmp_dir):\r\n try:\r\n read_owner = open(lock_file).readlines()[0].strip()\r\n # the try is transtion code for old locks\r\n # it may be removed when poeple have upgraded\r\n try:\r\n other_host = read_owner.split('_')[2]\r\n except IndexError:\r\n other_host = () # make sure it isn't equal to any host\r\n if other_host == 
socket.gethostname():\r\n try:\r\n os.kill(int(read_owner.split('_')[0]), 0)\r\n except OSError:\r\n other_dead = True\r\n except AttributeError:\r\n pass #os.kill does not exist on windows\r\n except Exception:\r\n read_owner = 'failure'\r\n if other_dead:\r\n if not no_display:\r\n msg = \"process '%s'\" % read_owner.split('_')[0]\r\n _logger.warning(\"Overriding existing lock by dead %s \"\r\n \"(I am process '%s')\", msg, my_pid)\r\n get_lock.unlocker.unlock()\r\n continue\r\n if last_owner == read_owner:\r\n if (timeout is not None and\r\n time.time() - time_start >= timeout):\r\n # Timeout exceeded or locking process dead.\r\n if not no_display:\r\n if read_owner == 'failure':\r\n msg = 'unknown process'\r\n else:\r\n msg = \"process '%s'\" % read_owner.split('_')[0]\r\n _logger.warning(\"Overriding existing lock by %s \"\r\n \"(I am process '%s')\", msg, my_pid)\r\n get_lock.unlocker.unlock()\r\n continue\r\n else:\r\n last_owner = read_owner\r\n time_start = time.time()\r\n no_display = (verbosity == 0)\r\n if not no_display and nb_wait > 0:\r\n if read_owner == 'failure':\r\n msg = 'unknown process'\r\n else:\r\n msg = \"process '%s'\" % read_owner.split('_')[0]\r\n _logger.info(\"Waiting for existing lock by %s (I am \"\r\n \"process '%s')\", msg, my_pid)\r\n _logger.info(\"To manually release the lock, delete %s\",\r\n tmp_dir)\r\n if verbosity <= 1:\r\n no_display = True\r\n nb_wait += 1\r\n time.sleep(random.uniform(min_wait, max_wait))\r\n\r\n try:\r\n os.mkdir(tmp_dir)\r\n except OSError:\r\n # Error while creating the directory: someone else\r\n # must have tried at the exact same time.\r\n nb_error += 1\r\n if nb_error < 10:\r\n continue\r\n else:\r\n raise\r\n # Safety check: the directory should be here.\r\n assert os.path.isdir(tmp_dir)\r\n\r\n # Write own id into lock file.\r\n unique_id = refresh_lock(lock_file)\r\n\r\n # Verify we are really the lock owner (this should not be needed,\r\n # but better be safe than sorry).\r\n owner = open(lock_file).readlines()[0].strip()\r\n if owner != unique_id:\r\n # Too bad, try again.\r\n continue\r\n else:\r\n # We got the lock, hoorray!\r\n return\r\n\r\n except Exception, e:\r\n # If something wrong happened, we try again.\r\n _logger.warning(\"Something wrong happened: %s %s\", type(e), e)\r\n nb_error += 1\r\n if nb_error > 10:\r\n raise\r\n time.sleep(random.uniform(min_wait, max_wait))\r\n continue", "def __init__(self, dir, timeout=None):\n ExclusiveLock.__init__(self, dir, timeout)\n writeLockDir = os.path.join(self.dir, WriteLock.fileName)\n self.writeLock = ExclusiveLock(writeLockDir, timeout)", "def acquire(self, timeout=None):\r\n try:\r\n open(self.unique_name, \"wb\").close()\r\n except IOError:\r\n raise LockFailed\r\n\r\n end_time = time.time()\r\n if timeout is not None and timeout > 0:\r\n end_time += timeout\r\n\r\n while True:\r\n # Try and create a hard link to it.\r\n try:\r\n os.link(self.unique_name, self.lock_file)\r\n except OSError:\r\n # Link creation failed. Maybe we've double-locked?\r\n nlinks = os.stat(self.unique_name).st_nlink\r\n if nlinks == 2:\r\n # The original link plus the one I created == 2. We're\r\n # good to go.\r\n return\r\n else:\r\n # Otherwise the lock creation failed.\r\n if timeout is not None and time.time() > end_time:\r\n os.unlink(self.unique_name)\r\n if timeout > 0:\r\n raise LockTimeout\r\n else:\r\n raise AlreadyLocked\r\n time.sleep(timeout is not None and timeout/10 or 0.1)\r\n else:\r\n # Link creation succeeded. 
We're good to go.\r\n return", "def acquire(self):\n start_time = time.time()\n while True:\n # 当前文件锁对象未有加锁,执行加锁\n if self.fd is None:\n try:\n # 独占式打开文件\n lock_dir = os.path.dirname(self.lockfile)\n if not os.path.isdir(lock_dir):\n os.makedirs(lock_dir, exist_ok=True)\n self.fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)\n break\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise\n # 超时\n if (time.time() - start_time) >= self.timeout:\n raise FileLockException(\"Timeout occured.\")\n # 本次加锁失败,需要等待\n time.sleep(self.delay)\n self.is_locked = True", "def __init__(self, dir, timeout=None, readlocktimeout=None):\n ExclusiveLock.__init__(self, dir, timeout)\n if readlocktimeout is None:\n self.readlocktimeout = timeout\n else:\n self.readlocktimeout = readlocktimeout", "def acquire(self, timeout=None):\n if self._locked:\n raise RuntimeError(\"lock already locked\")\n if self.writeLock.acquire(timeout):\n try:\n self.lockDir = tempfile.mkdtemp('', self.fileName, self.dir)\n self._locked = True\n # log('acquired read lock: %s\\n' % self.lockDir)\n return True\n finally:\n self.writeLock.release() \n return False", "def __init__(self, lockfile, lifetime=DEFAULT_LOCK_LIFETIME):\n self.__lockfile = lockfile\n self.__lifetime = lifetime\n # This works because we know we're single threaded\n self.__counter = LockFile.COUNTER\n LockFile.COUNTER += 1\n self.__tmpfname = '%s.%s.%d.%d' % (lockfile, \n socket.gethostname(),\n os.getpid(),\n self.__counter)", "def testUnlockWait(t, env):\n c = env.c1\n c.init_connection()\n fh, stateid = c.create_confirm(t.code)\n res = c.lock_file(t.code, fh, stateid, 20, 100)\n check(res, msg=\"Locking file %s\" % t.code)\n sleeptime = c.getLeaseTime() * 2\n env.sleep(sleeptime)\n ops = c.use_obj(fh)\n ops += [c.locku_op(READ_LT, 1, res.lockid, 0, 0xffffffffffffffff)]\n _replay(c, ops, [NFS4_OK, NFS4ERR_EXPIRED])", "def _file_open_rlock(self, preset_type, timeout=1.0):\n\n if self._fd is None:\n path = self._path(preset_type)\n with open(path, 'r+') as fd:\n # Set up file lock timeout with a raising handler\n # We will need this handler due to PEP 475\n def interrupt(signum, frame):\n raise InterruptedError()\n\n old_handler = signal.signal(signal.SIGALRM, interrupt)\n try:\n signal.setitimer(signal.ITIMER_REAL, timeout)\n fcntl.flock(fd, fcntl.LOCK_EX)\n except InterruptedError:\n # Ignore interrupted and proceed to cleanup\n pass\n finally:\n # Clean up file lock timeout\n signal.setitimer(signal.ITIMER_REAL, 0)\n signal.signal(signal.SIGALRM, old_handler)\n # Error now if we still can't get the lock.\n # Getting lock twice is safe.\n fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\n logger.debug('acquired lock for %s', path)\n self._fd = fd\n yield fd\n fcntl.flock(fd, fcntl.LOCK_UN)\n logger.debug('released lock for %s', path)\n self._fd = None\n else:\n logger.debug('using already open file descriptor')\n yield self._fd", "def __init__(self, fname, lockduration=10, verbosity=0):\n self._verbosity = verbosity\n self._lockduration = lockduration\n fname = op.normpath(fname)\n self._basedir = op.dirname(fname)\n self._lockfilename = \"%s.lock\" % op.basename(fname)\n self._uniquename = \",\".join((\n self._lockfilename, socket.getfqdn(), str(os.getpid()),\n str(uuid.uuid4())[-12:],\n ))\n self._uniquefile_created = False\n self._p(\" File to lock: %s\" % fname)\n self._p(\"Lockfile name: %s\" % self._lockfilename)\n self._p(\" Unique name: %s\" % self._uniquename)", "def testLock(t, env):\n c = env.c1\n c.init_connection()\n # Create a file 
and partially lock it\n fh, stateid = c.create_confirm(t.code)\n res = c.lock_file(t.code, fh, stateid, 20, 100)\n check(res, msg=\"Locking file %s\" % t.code)\n # Create and replay LOCK ops\n ops = c.use_obj(fh)\n lock_owner = exist_lock_owner4(res.lockid, 1)\n locker = locker4(FALSE, lock_owner=lock_owner)\n ops += [c.lock_op(WRITE_LT, FALSE, 0, 10, locker)]\n _replay(c, ops)", "def FSLockExclusive(filepath, timeout=None):\n return _lock(filepath, True, timeout=timeout)", "def _wait_for_lockfile(self, lockfile_path: Path) -> None:\n if not lockfile_path.exists():\n return\n\n # The first second is free.\n start = time.time()\n time.sleep(1)\n if not lockfile_path.exists():\n return\n\n # After the first second, we print one message, then we stay silent for 10 minutes, at\n # which time we print a message every minute.\n def time_elapsed() -> float:\n return time.time() - start\n self.logger.info(\"Starting to wait for %s\", lockfile_path)\n next_message_time = time.time() + 16 * 60\n while lockfile_path.exists():\n if next_message_time - time.time() < 0:\n self.logger.warning(\n \"Lockfile %s has been blocked for %.0f seconds\",\n lockfile_path,\n time_elapsed())\n next_message_time = time.time() + 60\n time.sleep(1)", "def _Flock(self, timeout_secs):\n try:\n if timeout_secs > 0:\n wrapper = utils.TimeoutException(timeout_secs)\n wrapper(fcntl.flock)(self._file_desc, fcntl.LOCK_EX)\n else:\n fcntl.flock(self._file_desc, fcntl.LOCK_EX | fcntl.LOCK_NB)\n except errors.FunctionTimeoutError as e:\n logger.debug(\"Cannot lock %s within %s seconds\",\n self._file_path, timeout_secs)\n return False\n except (OSError, IOError) as e:\n # flock raises IOError in python2; OSError in python3.\n if e.errno in (errno.EACCES, errno.EAGAIN):\n logger.debug(\"Cannot lock %s\", self._file_path)\n return False\n raise\n return True", "def test_waits_on_existing_lockfile(self):\n self.lock.__enter__()\n self.assertTrue(os.path.exists(self.lock.lockfile_path))\n\n def exit_first_lock():\n time.sleep(0.1)\n self.lock.__exit__(None, None, None)\n thread = threading.Thread(target=exit_first_lock)\n thread.start()\n\n new_lock = disk.DiskDatasetLock(self.dataset, timeout_sec=1)\n new_lock.__enter__()\n\n thread.join()", "def acquire(path, onwait=None):\r\n\r\n touch(path)\r\n lock_fd = lock_file(path, blocking=False)\r\n if not lock_fd:\r\n blocking = True\r\n with open(path, 'r') as fd:\r\n pid = int(fd.read().strip())\r\n if onwait:\r\n blocking = onwait(pid)\r\n if not blocking:\r\n return None\r\n lock_fd = lock_file(path, blocking=blocking)\r\n\r\n lock_fd.truncate(0)\r\n lock_fd.write('%d\\n' % os.getpid())\r\n lock_fd.flush()\r\n return Lock(lock_fd)", "def _lock_process(pipe, filepath, exclusive, timeout=None):\n try:\n # Reset signal handlers\n signal.signal(signal.SIGINT, signal.SIG_DFL)\n signal.signal(signal.SIGHUP, signal.SIG_DFL)\n signal.signal(signal.SIGTERM, signal.SIG_DFL)\n\n # Open the file\n mode = os.O_RDONLY | os.O_CREAT if exclusive else os.O_RDONLY\n try:\n fd = os.open(filepath, mode)\n except FileNotFoundError:\n pipe.send('NOTFOUND')\n return\n\n # Lock it\n op = fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH\n if timeout is None:\n fcntl.flock(fd, op)\n elif timeout == 0:\n try:\n fcntl.flock(fd, op | fcntl.LOCK_NB)\n except BlockingIOError:\n pipe.send('TIMEOUT')\n return\n else:\n with timeout_syscall(timeout):\n try:\n fcntl.flock(fd, op)\n except InterruptedError:\n pipe.send('TIMEOUT')\n return\n pipe.send('LOCKED')\n except Exception:\n pipe.send('ERROR')\n raise\n\n # Wait for 
unlock message then exit\n assert pipe.recv() == 'UNLOCK'\n\n # Exiting releases the lock", "def acquire(self, timeout=None):\n timer = self.timerClass(timeout)\n timer.start()\n while timer.haveTime():\n try:\n os.mkdir(self.lockDir)\n self._locked = True\n # log('acquired exclusive lock: %s\\n' % (self.lockDir, ))\n return True\n except OSError, err:\n if err.errno != errno.EEXIST:\n raise\n if self.expire():\n continue # Try immediately to acquire\n timer.sleep()\n return False", "def AcquireFileLock(target_file, flags):\n assert flags in (\n LOCK_EX, LOCK_SH, LOCK_NB, LOCK_EX | LOCK_NB, LOCK_SH | LOCK_NB)\n if os.name == 'nt':\n _LockImplWin(target_file, flags)\n elif os.name == 'posix':\n _LockImplPosix(target_file, flags)\n else:\n raise NotImplementedError('%s is not supported' % os.name)", "def make_pidlockfile(path, acquire_timeout):\n if not isinstance(path, basestring):\n error = ValueError(\"Not a filesystem path: %(path)r\" % vars())\n raise error\n if not os.path.isabs(path):\n error = ValueError(\"Not an absolute path: %(path)r\" % vars())\n raise error\n lockfile = pidlockfile.TimeoutPIDLockFile(path, acquire_timeout)\n\n return lockfile", "def test_locked_file_03(self):\n \n f = open(\"tests/locked.db3\", \"a+\")\n fcntl.lockf(f.fileno(), fcntl.LOCK_EX) \n \n x = subprocess.Popen([\"sqlbak\", \"tests\", \"--ms-towait=4000\"],\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n\n time.sleep(3)\n fcntl.lockf(f.fileno(), fcntl.LOCK_UN)\n f.close()\n\n result = x.communicate()[0]\n\n self.assertTrue(\"cannot obtain lock\" not in result)" ]
[ "0.7275898", "0.6982331", "0.6930079", "0.67412364", "0.6593794", "0.64104044", "0.63795364", "0.6343358", "0.6310697", "0.6309369", "0.6213117", "0.60422367", "0.5983077", "0.59159404", "0.58600146", "0.5779379", "0.5777742", "0.5752242", "0.56741214", "0.56453484", "0.56215525", "0.5606763", "0.5593011", "0.556964", "0.5542238", "0.54823834", "0.5481801", "0.5477895", "0.5457725", "0.5418548" ]
0.7269806
1
View the content of the given `TreeManager` in the LSpace eco_viewer tool
def view_in_lspace(tree_manager, lspace_root=None):
    node = tree_manager.lines[0].node  # first node
    doc = EcoDoc()
    while True:
        node = node.next_term
        if isinstance(node.symbol, IndentationTerminal):
            continue
        if isinstance(node, EOS):
            lbnode = tree_manager.get_languagebox(node)
            if lbnode:
                node = lbnode
                continue
            else:
                break
        if isinstance(node.symbol, MagicTerminal):
            node = node.symbol.ast.children[0]
            continue
        if node.symbol.name == "\r":
            doc.new_line()
        else:
            doc.token(EcoDocToken(node.symbol.name))
    return viewer(doc, lspace_root=lspace_root)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def View_Tree_1(self):\r\n self.system.Set_Tree_View_Mode(1)", "def tree():\n nobv.visual_tree()", "def View_Tree_2(self):\r\n self.system.Set_Tree_View_Mode(2)", "def View_Tree_3(self):\r\n self.system.Set_Tree_View_Mode(3)", "def treeExplorer(**kwds):\n from .TreeExplorer import TreeExplorer\n\n return TreeExplorer(**kwds)", "def View_Tree_0(self):\r\n self.system.Set_Tree_View_Mode(0)", "def watch_show_tree(self, show_tree: bool) -> None:\n self.set_class(show_tree, \"-show-tree\")", "def test_tree(self):\n root = role_middleware.get_root()\n tree_list = role_middleware.get_tree(root.id)\n role_middleware.force_refresh()\n print(tree_list)", "def showManagerDialog(self,c):\n if not self.plugin_manager:\n g.es(\"Plugin manager could not be loaded\", color=\"red\")\n else:\n #\n # The manager class is defined as a dynamic class because\n # we don't know if we will be able to import the \n # base class!\n #@+<< class HandlerDialog >>\n #@+node:ekr.20050329082101.143: *4* << class HandlerDialog >>\n class HandlerDialog(self.plugin_manager.ManagerDialog):\n \"\"\"A dialog to manager tree handlers\"\"\"\n\n dialog_caption = \"AutoTree Handler Manager\"\n\n #@+others\n #@+node:ekr.20060107092231: *5* ctor\n def __init__ (self,c):\n\n self.c = c\n #@+node:ekr.20050329082101.144: *5* setPaths\n def setPaths(self):\n\n \"\"\"Set paths to the plugin locations\"\"\"\n self.local_path = g.os_path_join(g.app.loadDir,\"..\",\"plugins\",\"trees\")\n # self.remote_path = r\"cvs.sourceforge.net/viewcvs.py/leo/leo/plugins/trees\"\n self.remote_path = r'leo.tigris.org/source/browse/leo/plugins/trees'\n #@-others\n #@-<< class HandlerDialog >>\n dlg = HandlerDialog(c)", "def print_tree(self, tree, nodes):\n\t\tprint(self.display(tree, nodes, '', True, ''))", "def osd_tree(self):\n return self.mon_command(cmd='osd tree')", "def plotTree(self):\n t = self.make(self.tree)\n t.draw()", "def visualize_tree(root):\n _visualize_tree(root, [], 0, '-')", "def show_tree(self):\n G, vertex_dict = self.tree().graph()\n root = self.tree().root()\n vertical_list = []\n horizontal_list = []\n no_component_list = []\n for i, xi in vertex_dict.items():\n if xi.is_equal(root):\n root_index = i\n if self.is_component(xi):\n if xi.type() == \"II\":\n vertical_list.append(i)\n else:\n horizontal_list.append(i)\n print(i, \": \", xi)\n else:\n no_component_list.append(i)\n vertex_colors = {'red': vertical_list, 'blue': horizontal_list,\n 'grey': no_component_list}\n G.show(vertex_colors=vertex_colors, tree_root=root_index, layout='tree')", "def Draw_Tree( self, rooted_tree, menuoptions = 0, editor = 0 ):\r\n #Clear the previous information\r\n self.Reset_Selection()\r\n self.canvas_one.delete( ALL )\r\n self.canvas_two.delete( ALL )\r\n self.handle_list = []\r\n \r\n if editor:\r\n self.Adjust_Menu( menuoptions )\r\n #if no node\r\n if( rooted_tree == 0 ):\r\n self.canvas_one.create_text( cb.xorigin, 5, text=\"There is no tree to display\", anchor = NW )\r\n ys = 0\r\n #one node\r\n elif( rooted_tree.sub == [] ):\r\n #if there is only one node, make its length one because a zero length will not show up\r\n store = rooted_tree.data.length\r\n rooted_tree.data.length = 1\r\n xlong = rooted_tree.Longest_Branch( )\r\n cb.New_XLong( xlong )\r\n ys = self.Draw_Node( rooted_tree, cb.xorigin, cb.yorigin)\r\n rooted_tree.data.length = store\r\n else:\r\n #recursively draw the tree, temporarily store the root's length and make it zero\r\n #If the root is long(Isolated), it does not squish the rest of the data\r\n store = 
rooted_tree.data.length\r\n rooted_tree.data.length = 0\r\n #Get the longest distance from root to leaf\r\n xlong = rooted_tree.Longest_Branch( )\r\n cb.New_XLong( xlong ) #Change the scale\r\n ys, ypos1 = self.Rec_Draw_Tree( rooted_tree, cb.xorigin, cb.yorigin )\r\n #Extend the root node so that it is visible\r\n ls = self.Find_Line_By_Node( rooted_tree )\r\n self.canvas_one.coords( ls.line_handle, cb.xorigin-5, ypos1, cb.xorigin, ypos1 )\r\n rooted_tree.data.length = store #restore the root node's length\r\n ys = ys + cb.ytick\r\n self.canvas_one.create_text(20,ys,text=\"_____\")\r\n self.canvas_two.create_text(20,ys,text=\"_____\") #end markers\r\n #Set the scrollregions of the canvases\r\n ys = ys + cb.ytick\r\n self.ys = ys + 0*cb.ytick\r\n self.canvas_one.config( scrollregion = ( 0, 0, 300, self.ys ) )\r\n self.canvas_two.config( scrollregion = ( 0, 0, 300, self.ys ) )\r\n self.Draw_Scale()", "def drawtree(self):\r\n\r\n Phylo.draw(self.tree)", "def treeView(*args, addItem: Union[List[AnyStr, AnyStr], List[List[AnyStr, AnyStr]]]=None,\n allowDragAndDrop: bool=True, allowHiddenParents: bool=True, allowMultiSelection:\n bool=True, allowReparenting: bool=True, annotation: Union[AnyStr, bool]=\"\",\n attachButtonRight: int=0, backgroundColor: Union[List[float, float, float],\n bool]=None, borderHighlite: List[AnyStr, bool]=None, borderHighliteColor:\n List[AnyStr, float, float, float]=None, buttonErase: Union[List[AnyStr, bool],\n List[List[AnyStr, bool]]]=None, buttonState: Union[List[AnyStr, int, AnyStr],\n List[List[AnyStr, int, AnyStr]]]=None, buttonStyle: Union[List[AnyStr, int,\n AnyStr], List[List[AnyStr, int, AnyStr]]]=None, buttonTextIcon: Union[List[AnyStr,\n int, AnyStr], List[List[AnyStr, int, AnyStr]]]=None, buttonTooltip:\n Union[List[AnyStr, int, AnyStr], List[List[AnyStr, int, AnyStr]]]=None,\n buttonTransparencyColor: Union[List[AnyStr, int, float, float, float],\n List[List[AnyStr, int, float, float, float]]]=None, buttonTransparencyOverride:\n Union[List[AnyStr, int, bool], List[List[AnyStr, int, bool]]]=None, buttonVisible:\n Union[List[AnyStr, int, bool], List[List[AnyStr, int, bool]]]=None, children:\n Union[AnyStr, bool]=\"\", clearSelection: bool=True, contextMenuCommand: Script=None,\n defineTemplate: AnyStr=\"\", displayLabel: Union[List[AnyStr, AnyStr],\n List[List[AnyStr, AnyStr]]]=None, displayLabelSuffix: Union[List[AnyStr, AnyStr],\n List[List[AnyStr, AnyStr]]]=None, docTag: Union[AnyStr, bool]=\"\",\n dragAndDropCommand: Script=None, dragCallback: Script=None, dropCallback:\n Script=None, editLabelCommand: Script=None, enable: bool=True, enableBackground:\n bool=True, enableButton: Union[List[AnyStr, int, int], List[List[AnyStr, int,\n int]]]=None, enableKeyboardFocus: bool=True, enableKeys: bool=True, enableLabel:\n List[AnyStr, int]=None, exists: bool=True, expandCollapseCommand: Script=None,\n expandItem: List[AnyStr, bool]=None, flatButton: Union[int, bool]=0, font:\n Union[List[AnyStr, AnyStr], bool]=None, fontFace: List[AnyStr, int]=None,\n fullPathName: bool=True, height: Union[int, bool]=0, hideButtons: bool=True,\n highlightColor: Union[List[float, float, float], bool]=None, highlite: List[AnyStr,\n bool]=None, highliteColor: List[AnyStr, float, float, float]=None,\n ignoreButtonClick: Union[List[AnyStr, int, int], List[List[AnyStr, int,\n int]]]=None, image: Union[List[AnyStr, int, AnyStr], List[List[AnyStr, int,\n AnyStr]]]=None, insertItem: Union[List[AnyStr, AnyStr, int], List[List[AnyStr,\n AnyStr, int]]]=None, isItemExpanded: Union[AnyStr, 
bool]=\"\", isLeaf: Union[AnyStr,\n bool]=\"\", isObscured: bool=True, item: Union[AnyStr, bool]=\"\", itemAnnotation:\n Union[List[AnyStr, AnyStr], bool]=None, itemDblClickCommand: Script=None,\n itemDblClickCommand2: Script=None, itemExists: Union[AnyStr, bool]=\"\", itemIndex:\n Union[AnyStr, bool]=\"\", itemParent: Union[AnyStr, bool]=\"\", itemRenamedCommand:\n Script=None, itemSelected: Union[AnyStr, bool]=\"\", itemVisible: List[AnyStr,\n bool]=None, labelBackgroundColor: List[AnyStr, float, float, float]=None, manage:\n bool=True, noBackground: bool=True, numberOfButtons: int=0, numberOfPopupMenus:\n bool=True, ornament: List[AnyStr, int, int, int]=None, ornamentColor: List[AnyStr,\n float, float, float]=None, parent: Union[AnyStr, bool]=\"\", popupMenuArray:\n bool=True, pressCommand: Union[List[int, Script], List[List[int, Script]]]=None,\n preventOverride: bool=True, removeAll: bool=True, removeItem: AnyStr=\"\",\n reverseTreeOrder: bool=True, rightPressCommand: Union[List[int, Script],\n List[List[int, Script]]]=None, select: List[AnyStr, int]=None, selectCommand:\n Script=None, selectItem: List[AnyStr, bool]=None, selectionChangedCommand:\n Script=None, selectionColor: Union[List[AnyStr, float, float, float], bool]=None,\n showItem: AnyStr=\"\", statusBarMessage: AnyStr=\"\", textColor: List[AnyStr, float,\n float, float]=None, useTemplate: AnyStr=\"\", visible: bool=True,\n visibleChangeCommand: Union[Script, bool]=None, width: Union[int, bool]=0, q=True,\n query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def show(self):\n if self._tree is None:\n raise RuntimeError(\"Estimator not fitted, call `fit` first\")\n\n import tree_plotter\n tree_plotter.createPlot(self._tree)", "def plot_mcc_tree():\n t = ete2.Tree(\"mcct.nex\")\n ts = ete2.treeview.TreeStyle()\n ts.show_scale = False\n ts.show_leaf_name = False\n ts.show_branch_support = False\n ts.scale = 500\n margin = 10\n ts.margin_top = margin\n ts.margin_bottom = margin\n ts.margin_left = margin\n ts.margin_right = margin\n\n germ_style = ete2.NodeStyle()\n germ_style[\"bgcolor\"] = \"LightSteelBlue\"\n proto_germ = t.get_common_ancestor(\"Danish\", \"Norwegian\",\"Icelandic\",\"Swedish\", \"Dutch\", \"German\", \"English\")\n proto_germ.set_style(germ_style)\n\n bs_style = ete2.NodeStyle()\n bs_style[\"bgcolor\"] = \"Moccasin\"\n proto_bs = t.get_common_ancestor(\"Bulgarian\", \"Czech\",\"Polish\",\"Russian\")\n proto_bs.set_style(bs_style)\n\n ital_style = ete2.NodeStyle()\n ital_style[\"bgcolor\"] = \"DarkSeaGreen\"\n proto_ital = t.get_common_ancestor(\"French\", \"Romanian\", \"Italian\", \"Portuguese\", \"Spanish\")\n proto_ital.set_style(ital_style)\n\n t.render(\"mcct.eps\", style_func, tree_style=ts, dpi=600, units=\"px\", w=2250)", "def context_menu(self, treeview, position):\n\n all_item = get_current_item(self,treeview,single=False)\n\n if len(all_item) == 1:\n\n item = all_item[0]\n data = get_group_data(get_current_hdf5_group(self,item))\n\n if data is None:\n list_operations = ['Print attrs', 'PyMol']\n\n elif data.ndim == 1:\n list_operations = ['Print attrs','-','Plot Hist', 'Plot Line']\n\n elif data.ndim == 2:\n list_operations = ['Print attrs','-','Plot Hist', 'Plot Map']\n\n else:\n list_operations = ['Print attrs']\n\n action,actions = get_actions(treeview,position,list_operations)\n #action, actions = get_multilevel_actions(treeview,position,list_operations,list_sub)\n\n if action == actions['Print attrs']:\n send_dict_to_console(self,item,treeview)\n\n if 'Plot Hist' in 
actions:\n if action == actions['Plot Hist']:\n plot_histogram(self,item,treeview)\n\n if 'Plot Line' in actions:\n if action == actions['Plot Line']:\n plot_line(self,item,treeview)\n\n if 'Plot Map' in actions:\n if action == actions['Plot Map']:\n plot2d(self,item,treeview)\n\n if 'PyMol' in actions:\n if action == actions['PyMol']:\n\n grp = get_current_hdf5_group(self,item)\n data_dict = {'_grp':grp}\n treeview.emitDict.emit(data_dict)\n\n cmd = 'launchPyMol(_grp)'\n data_dict = {'exec_cmd':cmd}\n treeview.emitDict.emit(data_dict)", "def printTree(self):\n print self.storeTree.movies", "def vtk_viewer(request):\n try:\n data = _refresh(request)\n except Exception:\n data = {}\n data['main'] = 'main'\n data['error'] = 'error'\n data['search'] = {\n 'help': ''\n }\n options = {\n 'resizable': True\n }\n data['options'] = mark_safe(json.dumps(options))\n return render(\n request,\n 'vtk_view/cdat_viewer.html',\n data\n )", "def main():\n root = Node(1)\n root.left = Node(2)\n root.right = Node(3)\n root.left.left = Node(4)\n root.left.right = Node(5)\n root.right.left = Node(6)\n root.right.right = Node(7)\n\n v = View()\n v.top_view(root)", "def display_tree(self, tree_node, spacing=\"\"):\n if tree_node is None:\n return\n else:\n print(spacing + str(tree_node.val))\n spacing += \" \"\n self.display_tree(tree_node.left, spacing)\n self.display_tree(tree_node.right, spacing)", "def showEditorInfo(self, editor):\n documentationViewer = self.ui.documentationViewer()\n if documentationViewer:\n documentationViewer.showInfo(editor)", "def showFileTree():\n\treturn 0", "def open_viewer(self):\r\n choice = self.thoughts_lst.get(tk.ACTIVE)\r\n subject = self.refference[choice]\r\n tbl = self.home_table[subject]\r\n view = kit.SQL_pull('*', tbl, 'subject_id = \"{}\"'.format(subject))\r\n obj = kit.class_fill(tbl, view[0])\r\n self.session = tk.Toplevel(self.master, **jt.bframe_style)\r\n jv.Viewer(self.session, obj)", "def view_system():\n\n pass", "def refresh(self, view_manager):\n pass", "def plot_dollo_ml_tree(tree, nodes):\n leaf_order = []\n for leaf in tree.leaves:\n leaf.plot_id = leaf.name\n leaf_order.append(leaf.name)\n\n origin_counts = nodes.groupby('node')['ml_origin'].sum()\n\n for node in tree.nodes:\n node.origin_count = origin_counts[node.label]\n\n loss_counts = nodes.groupby('node')['ml_loss'].sum()\n\n width = 1 + 0.5 * float(len(list(tree.leaves)))\n fig = plt.figure(figsize=(width/1.5, 6))\n\n ax = fig.add_subplot(111)\n\n def func(x, pos):\n s = '{:0,d}'.format(int(x))\n return s\n ax.yaxis.set_major_formatter(matplotlib.ticker.FuncFormatter(func))\n\n wgs_analysis.plots.trees.plot_tree(ax, tree, landscape=False, flip=True, branch_length_attr='origin_count', leaf_name_attr='plot_id')\n\n ax.set_ylabel('SNV count')\n\n plt.tight_layout()" ]
[ "0.6410106", "0.62578595", "0.6140295", "0.6094907", "0.5861213", "0.5749762", "0.5636365", "0.5575719", "0.5533464", "0.55283636", "0.5482779", "0.5446668", "0.5417185", "0.5394786", "0.5394212", "0.53881365", "0.5326237", "0.53217983", "0.53179187", "0.5317833", "0.53139853", "0.5313154", "0.5312796", "0.5312333", "0.5306449", "0.53010035", "0.527672", "0.5276668", "0.5246255", "0.52407557" ]
0.7159866
0
Upgrade nodes. If mixed_version is True, only upgrade the nodes specified by the nodes argument; otherwise ignore it and upgrade all nodes.
def upgrade_to_version(self, version, mixed_version=False, nodes=None): debug('Upgrading to ' + version) if not mixed_version: nodes = self.cluster.nodelist() for node in nodes: debug('Prepping node for shutdown: ' + node.name) node.flush() self._check_values() self._check_counter_values() for node in nodes: debug('Shutting down node: ' + node.name) time.sleep(.5) node.stop(wait_other_notice=False) if ENABLE_VNODES and version >= "1.2": self.cluster.set_configuration_options(values={'initial_token': None, 'num_tokens': 256}) for node in nodes: debug('Upgrading node: ' + node.name) node.set_cassandra_dir(cassandra_version=version) node.start(wait_other_notice=True) time.sleep(.5) if not mixed_version: node.nodetool('upgradesstables') if ENABLE_VNODES and version >= "1.2" and not mixed_version: debug("Running shuffle") self.node2.shuffle("create") self.node2.shuffle("en") for node in nodes: debug('Checking node: ' + node.name) if not mixed_version: self._write_values() self._check_values() self._increment_counter_value() time.sleep(0.5) self._check_counter_values() if not mixed_version: # Check we can bootstrap a new node on the upgraded cluster: debug("Adding a node to the cluster") self.cluster.set_cassandra_dir(cassandra_version=version) nnode = new_node(self.cluster, remote_debug_port=str(2000+len(self.cluster.nodes))) nnode.start(no_wait=False) nnode.watch_log_for("Bootstrap completed!") debug("node should be up, but sleeping a bit to ensure...") time.sleep(15) self._check_values() self._check_counter_values() if mixed_version: debug('Successfully upgraded part of the cluster to %s' % version) else: debug('Successfully upgraded to %s' % version)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_nodes(self):\n raise NotImplementedError('ERROR: sweeper has to implement update_nodes(self)')", "def update_nodes(self, weights=None, hive_instance=None):\n hive = hive_instance or shared_hive_instance()\n metadata = None\n account = None\n cnt = 0\n while metadata is None and cnt < 5:\n cnt += 1\n try:\n account = Account(\"fullnodeupdate\", hive_instance=hive)\n metadata = json.loads(account[\"json_metadata\"])\n except:\n hive.rpc.next()\n account = None\n metadata = None\n if metadata is None:\n return\n report = metadata[\"report\"]\n failing_nodes = metadata[\"failing_nodes\"]\n parameter = metadata[\"parameter\"]\n benchmarks = parameter[\"benchmarks\"]\n if weights is None:\n weights_dict = {}\n for benchmark in benchmarks:\n weights_dict[benchmark] = (1. / len(benchmarks))\n elif isinstance(weights, list):\n weights_dict = {}\n i = 0\n weight_sum = 0\n for w in weights:\n weight_sum += w\n for benchmark in benchmarks:\n if i < len(weights):\n weights_dict[benchmark] = weights[i] / weight_sum\n else:\n weights_dict[benchmark] = 0.\n i += 1\n elif isinstance(weights, dict):\n weights_dict = {}\n i = 0\n weight_sum = 0\n for w in weights:\n weight_sum += weights[w]\n for benchmark in benchmarks:\n if benchmark in weights:\n weights_dict[benchmark] = weights[benchmark] / weight_sum\n else:\n weights_dict[benchmark] = 0.\n\n max_score = len(report) + 1\n new_nodes = []\n for node in self:\n new_node = node.copy()\n for report_node in report:\n if node[\"url\"] == report_node[\"node\"]:\n new_node[\"version\"] = report_node[\"version\"]\n scores = []\n for benchmark in benchmarks:\n result = report_node[benchmark]\n rank = result[\"rank\"]\n if not result[\"ok\"]:\n rank = max_score + 1\n score = (max_score - rank) / (max_score - 1) * 100\n weighted_score = score * weights_dict[benchmark]\n scores.append(weighted_score)\n sum_score = 0\n for score in scores:\n sum_score += score\n new_node[\"score\"] = sum_score\n for node_failing in failing_nodes:\n if node[\"url\"] == node_failing:\n new_node[\"score\"] = -1\n new_nodes.append(new_node)\n super(NodeList, self).__init__(new_nodes)", "def test_upgrade_apply_all_fine(setup, platform, skuba):\n\n setup_kubernetes_version(skuba)\n\n # node upgrade apply\n outs = {}\n for (r, n) in [(\"master\", 0), (\"worker\", 0)]:\n node = \"my-{}-{}\".format(r, n)\n outs[node] = skuba.node_upgrade(\"apply\", r, n)\n\n master = outs[\"my-master-0\"]\n assert master.find(\n \"Node my-master-0 is up to date\"\n ) != -1\n\n worker = outs[\"my-worker-0\"]\n assert worker.find(\n \"Node my-worker-0 is up to date\"\n ) != -1", "def test_check_package_versions(local_salt_client, nodes_in_group):\n # defines packages specific to the concrete nodes\n inconsistency_rule = {\"kvm03\": [\"rsync\", \"sysstat\", \"xz-utils\"], \"log01\": [\"python-elasticsearch\"], \"ctl01\": [\"python-gnocchiclient\", \"python-ujson\"]}\n exclude_packages = utils.get_configuration().get(\"skipped_packages\", [])\n group, nodes = nodes_in_group\n packages_versions = local_salt_client.cmd(tgt=\"L@\"+','.join(nodes),\n fun='lowpkg.list_pkgs',\n expr_form='compound')\n # Let's exclude cid01 and dbs01 nodes from this check\n exclude_nodes = list(local_salt_client.test_ping(tgt=\"I@galera:master or I@gerrit:client\",\n expr_form='compound').keys())\n # PROD-30833\n gtw01 = local_salt_client.pillar_get(\n param='_param:openstack_gateway_node01_hostname') or 'gtw01'\n cluster_domain = local_salt_client.pillar_get(\n param='_param:cluster_domain') or '.local'\n gtw01 += '.' 
+ cluster_domain\n if gtw01 in nodes:\n octavia = local_salt_client.cmd(tgt=\"L@\" + ','.join(nodes),\n fun='pillar.get',\n param='octavia:manager:enabled',\n expr_form='compound')\n gtws = [gtw for gtw in list(octavia.values()) if gtw]\n if len(gtws) == 1:\n exclude_nodes.append(gtw01)\n logging.info(\"gtw01 node is skipped in test_check_package_versions\")\n\n total_nodes = [i for i in nodes if i not in exclude_nodes]\n if len(total_nodes) < 2:\n pytest.skip(\"Nothing to compare - only 1 node\")\n nodes_with_packages = []\n packages_with_different_versions = []\n packages_names = set()\n\n for node in total_nodes:\n if not packages_versions[node]:\n # TODO: do not skip node\n logging.warning(\"Node {} is skipped\".format(node))\n continue\n nodes_with_packages.append(node)\n packages_names.update(list(packages_versions[node].keys()))\n for deb in packages_names:\n if deb in exclude_packages:\n continue\n diff = []\n row = []\n for node in nodes_with_packages:\n if not packages_versions[node]:\n continue\n if deb in list(packages_versions[node].keys()):\n diff.append(packages_versions[node][deb])\n row.append(\"{}: {}\".format(node, packages_versions[node][deb]))\n else:\n row.append(\"{}: No package\".format(node))\n\n if diff.count(diff[0]) < len(nodes_with_packages):\n if not is_deb_in_exception(inconsistency_rule, deb, row):\n row.sort()\n row.insert(0, deb)\n packages_with_different_versions.append(row)\n assert len(packages_with_different_versions) == 0, (\n \"Non-uniform package versions are installed on '{}' group of nodes:\\n\"\n \"{}\".format(\n group, json.dumps(packages_with_different_versions, indent=4))\n )", "def install_version_on_node(self, nodes, version):\n install_params = dict()\n install_params['num_nodes'] = len(nodes)\n install_params['product'] = \"cb\"\n install_params['version'] = version\n install_params['vbuckets'] = [self.cluster.vbuckets]\n install_params['init_nodes'] = False\n install_params['debug_logs'] = False\n self.installer_job.parallel_install(nodes, install_params)", "def upgrade(self, servers, clients):\n if \".repo\" in self.upgrade_repo:\n repo_2 = self.upgrade_repo\n repo_1 = self.downgrade_repo\n self.updowngrade_via_repo(servers, clients, repo_1, repo_2)\n else:\n all_hosts = servers + clients\n self.updowngrade_via_rpms(all_hosts, \"upgrade\", self.upgrade_repo)", "def upgrade_test_mixed(self):\n self.upgrade_scenario(mixed_version=True)", "def upgrade_nrml(directory, dry_run, multipoint):\n for cwd, dirs, files in os.walk(directory):\n for f in files:\n path = os.path.join(cwd, f)\n if f.endswith('.xml'):\n ip = iterparse(path, events=('start',))\n next(ip) # read node zero\n try:\n fulltag = next(ip)[1].tag # tag of the first node\n xmlns, tag = fulltag.split('}')\n except: # not a NRML file\n xmlns, tag = '', ''\n if xmlns[1:] == NRML05: # already upgraded\n if 'sourceModel' in tag and multipoint:\n print('upgrading to multiPointSources', path)\n node0 = nrml.read(path)[0]\n sourceconverter.update_source_model(node0)\n with open(path, 'wb') as f:\n nrml.write([node0], f, gml=True)\n elif 'nrml/0.4' in xmlns and (\n 'vulnerability' in tag or 'fragility' in tag or\n 'sourceModel' in tag):\n if not dry_run:\n print('Upgrading', path)\n try:\n upgrade_file(path, multipoint)\n except Exception as exc:\n raise\n print(exc)\n else:\n print('Not upgrading', path)", "def all_nodes_in_env(args):\n\n environment(args, env_name=args.get('name'))\n\n chefserver = open_chef_connection(args)\n nodes = chefserver.get_all_nodes(\n args.get('name')\n )\n 
all_nodes = [nd['name'] for nd in nodes if nd.get('name')]\n for nd in all_nodes:\n node = chefserver.get_node(name=nd)\n node_dict = node.to_dict()\n\n # Check the run_list on the node\n run_list = node_dict.get('run_list')\n if run_list is not None:\n node_dict['run_list'] = sanitize_run_list(run_list)\n\n for attribute in ['normal', 'default', 'override']:\n attributes = node_dict.get(attribute).to_dict()\n\n backup_attributes(\n backup_dict=attributes,\n name='%s_%s_Attributes' % (nd, attribute)\n )\n node_dict[attribute] = _super_munger(attributes)\n\n chefserver.put_node(old_node=nd, new_node=node_dict)\n\n if args.get('disable_rhel_check') is False:\n rhel_check(args, servers=nodes)", "def update_nodes(nodes, sc, organization, org_id, site_names):\n for node in nodes:\n print(\"=\" * 75)\n print(\"Node:\", node[\"id\"], node[\"serial\"], node[\"model\"])\n print(\"org:\", node[\"org\"], organization)\n print(\"site:\", node[\"site\"])\n print(\"location:\", node[\"location\"])\n\n site_id = node[\"site\"]\n site_name = site_names[site_id]\n print(\"\\nSetting location to '{}'\".format(site_name))\n node[\"location\"] = site_name\n result = sc.put(\"node/\" + node[\"id\"], data=node)\n print(\"updated location:\", result[\"location\"])\n print(\"Response:\", sc.response.status_code, sc.response.reason, \"\\n\")\n print()", "def onNodesUpdate(self, api, nodes):\n if nodes != None:\n logging.info('Nodes updated ({})'.format(nodes.size()))\n self.continue_event.set()", "def _SetNodes(self, nodes: int) -> None:\n cmd = util.GcloudCommand(self, 'spanner', 'instances', 'update', self.name)\n cmd.flags['nodes'] = nodes\n cmd.Issue(raise_on_failure=True)", "def test_add_strict_node_to_non_strict_node(self):\n non_strict_node = self.cluster.master\n strict_node = self.cluster.servers[self.nodes_init:self.nodes_init + 1][0]\n self.enable_tls_encryption_cli_on_nodes \\\n (nodes=self.cluster.servers[self.nodes_init:self.nodes_init + 1])\n CbServer.use_https = True\n RestConnection(non_strict_node).add_node(user='Administrator', password='password',\n port=CbServer.ssl_port,\n remoteIp=strict_node.ip)\n CbServer.use_https = False\n rest = RestConnection(non_strict_node)\n nodes = rest.node_statuses()\n rest.rebalance(otpNodes=[node.id for node in nodes],\n ejectedNodes=[])\n result = rest.monitorRebalance()\n self.assertTrue(result, \"Rebalance failed\")", "def package_upgrade():\n\n if (do_action_package_upgrade('nova-common',\n do_openstack_upgrade,\n CONFIGS)):\n # we should restart the container scoped (subordinate) plugins after a\n # managed openstack upgrade see: BUG#1835557\n for rid in relation_ids('neutron-plugin'):\n neutron_plugin_joined(rid, remote_restart=True)\n for rid in relation_ids('nova-ceilometer'):\n nova_ceilometer_joined(rid, remote_restart=True)\n for rid in relation_ids('nova-vgpu'):\n nova_vgpu_joined(rid, remote_restart=True)\n # NOTE(ajkavanagh) - if unit is paused (usually true for managed\n # upgrade) then the config_changed() function is a no-op\n config_changed()", "def updatenodes(show, test, only_https, only_wss):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n t = PrettyTable([\"node\", \"Version\", \"score\"])\n t.align = \"l\"\n nodelist = NodeList()\n nodelist.update_nodes(morphene_instance=stm)\n nodes = nodelist.get_nodes(normal=not, wss=not only_https, https=not only_wss)\n if show or test:\n sorted_nodes = sorted(nodelist, key=lambda node: node[\"score\"], reverse=True)\n for node in sorted_nodes:\n if node[\"url\"] 
in nodes:\n score = float(\"{0:.1f}\".format(node[\"score\"]))\n t.add_row([node[\"url\"], node[\"version\"], score])\n print(t)\n if not test:\n mph.set_default_nodes(nodes)", "def apply_nodes(self, func=\"default\", v=ALL, inplace=True):\n super(BaseGraphStore, self).apply_nodes(func, v, inplace=True)", "def _update(self, context, values, prune_stats=False):\n return db.compute_node_update(context, self.compute_node['id'],\n values, prune_stats)", "def add_nodes_to_cluster(self, nodes, redeploy=True, check_services=False):\n self.fuel_web.update_nodes(\n self.cluster_id,\n nodes,\n )\n if redeploy:\n self.fuel_web.deploy_cluster_wait(self.cluster_id,\n check_services=check_services)", "def upgrade_kernel_node(*args, **kwargs):\n for host_string in args:\n with settings(host_string=host_string):\n execute('create_install_repo_node', host_string)\n dist, version, extra = get_linux_distro()\n if version == '12.04':\n print \"upgrading apparmor before upgrading kernel\"\n apt_install([\"apparmor\"])\n print \"Installing 3.13.0-34 kernel headers\"\n apt_install([\"linux-headers-3.13.0-34\"])\n apt_install([\"linux-headers-3.13.0-34-generic\"])\n print \"Upgrading the kernel to 3.13.0-34\"\n apt_install([\"linux-image-3.13.0-34-generic\"])\n default_grub='Advanced options for Ubuntu>Ubuntu, with Linux 3.13.0-34-generic'\n execute('set_grub_default_node', host_string, value=default_grub)\n elif version == '14.04':\n if 'version' in kwargs:\n kernel_ver = kwargs.get('version')\n else:\n kernel_ver = \"3.13.0-106\"\n print \"Installing \"+kernel_ver+\" kernel headers\"\n apt_install([\"linux-headers-\"+kernel_ver,\n \"linux-headers-\"+kernel_ver+\"-generic\"])\n print \"Upgrading the kernel to \"+kernel_ver\n apt_install([\"linux-image-\"+kernel_ver+\"-generic\",\n \"linux-image-extra-\"+kernel_ver+\"-generic\"])\n default_grub='Advanced options for Ubuntu>Ubuntu, with Linux '+kernel_ver+'-generic'\n execute('set_grub_default_node', host_string, value=default_grub)\n elif 'red hat' in dist.lower() and version.startswith('7'):\n print \"Upgrading RHEL kernel to version 3.10.0-327.10.1\"\n pkg_install([\"kernel-3.10.0-327.10.1.el7.x86_64\",\n \"kernel-tools-3.10.0-327.10.1.el7.x86_64\",\n \"kernel-tools-libs-3.10.0-327.10.1.el7.x86_64\",\n \"kernel-headers-3.10.0-327.10.1.el7.x86_64\"], disablerepo=False)\n default_grub='Red Hat Enterprise Linux Server (3.10.0-327.10.1.el7.x86_64) 7.2 (Maipo)'\n execute('set_grub_default_node', host_string, value=default_grub)\n elif 'centos linux' in dist.lower() and version.startswith('7'):\n print \"Upgrading Centos kernel to version 3.10.0-327.10.1\"\n pkg_install([\"kernel-3.10.0-327.10.1.el7.x86_64\",\n \"kernel-tools-3.10.0-327.10.1.el7.x86_64\",\n \"kernel-tools-libs-3.10.0-327.10.1.el7.x86_64\",\n \"kernel-headers-3.10.0-327.10.1.el7.x86_64\"], disablerepo=False)\n default_grub='CentOS Linux (3.10.0-327.10.1.el7.x86_64) 7 (Core)'\n execute('set_grub_default_node', host_string, value=default_grub)", "def network_node_value_added(self, node=None, value=None, args=None):\n if node and node.node_id != self.node_id:\n return\n if args is not None and \"nodeId\" in args and args[\"nodeId\"] != self.node_id:\n return\n\n self.maybe_update_application_version(value)", "def add_nodes_from(self, nodes):\n self._Impl._nodes[\"all_nodes\"] = cudf.Series(nodes)", "def scale_nodes_in(self, nodes):\n for node in nodes:\n self.nodes.remove(node)\n return self.terminate_instances(node.instance_id for node in nodes)", "def test_upgrade_apply_from_previous(setup, platform, 
skuba):\n\n setup_kubernetes_version(skuba, PREVIOUS_VERSION)\n\n outs = {}\n for (r, n) in [(\"master\", 0), (\"worker\", 0)]:\n node = \"my-{}-{}\".format(r, n)\n outs[node] = skuba.node_upgrade(\"apply\", r, n)\n\n master = outs[\"my-master-0\"]\n assert master.find(\"successfully upgraded\") != -1\n\n worker = outs[\"my-worker-0\"]\n assert worker.find(\"successfully upgraded\") != -1", "def update(self, iterable):\n self._update_nodes(iterable)", "def set_node_property_bypass(node_names, new_values, visual_property, bypass=True, network=None,\n base_url=DEFAULT_BASE_URL):\n net_suid = networks.get_network_suid(network, base_url=base_url)\n view_suid = network_views.get_network_views(net_suid, base_url=base_url)[0]\n node_suids = node_name_to_node_suid(node_names, network=network, base_url=base_url)\n\n # TODO: Shouldn't we allow node_names=None to mean all nodes? ... as is, this causes an error below and is inconsistent with other functions\n # TODO: Find out how to test for bypass=True effects\n\n # there can be more than one node.SUID per node.name!\n # 'node.SUIDs' and 'new.values' must have the same length\n # TODO: Should we allow a scalar for a new_value, or do we assume a list?\n if not isinstance(new_values, list): new_values = [new_values]\n if len(new_values) == 1: new_values = new_values * len(node_suids)\n\n if visual_property is None: # TODO: Added this ... but what about an invalid property?\n raise CyError('Invalid visual property')\n\n if len(new_values) != len(node_suids):\n error = 'ERROR in set_node_property_bypass():\\n the number of nodes ' + str(\n len(node_suids)) + ' and new values ' + str(len(\n new_values)) + ' are not the same >> node(s) attribute couldn\\'t be set. Note that having multiple nodes with the same name in the network can cause this error. 
Use node SUIDs or pass in duplicated names on their own.'\n sys.stderr.write(error)\n return None # TODO: Is this what we want to return here?\n\n body_list = [{'SUID': str(suid), 'view': [{'visualProperty': visual_property, 'value': val}]} for suid, val in\n zip(node_suids, new_values)]\n\n res = commands.cyrest_put('networks/' + str(net_suid) + '/views/' + str(view_suid) + '/nodes',\n parameters={'bypass': bypass}, body=body_list, base_url=base_url, require_json=False)\n return res", "def update(self, nodes = None, connections = None):\r\n\r\n node_dict = node_dictionary()\r\n\r\n # FIXME: use either node type identifier or fully initialized node, not\r\n # node class (Warning: might break some existing code,\r\n # depreciate it first\r\n\r\n nodes = nodes or {}\r\n connections = connections or []\r\n\r\n for (name, obj) in nodes.items():\r\n if isinstance(obj, Node):\r\n node_instance = obj\r\n elif isinstance(obj, type) and issubclass(obj, Node):\r\n self.logger.warn(\"Using classes in Stream.update is depreciated\")\r\n node_instance = obj()\r\n else:\r\n if not \"type\" in obj:\r\n raise Exception(\"Node dictionary has no 'type' key\")\r\n node_type = obj[\"type\"]\r\n\r\n if node_type in node_dict:\r\n node_class = node_dict[node_type]\r\n node_instance = node_class()\r\n\r\n node_instance.configure(obj)\r\n else:\r\n raise Exception(\"No node class of type '%s'\" % node_type)\r\n\r\n self.add(node_instance, name)\r\n\r\n if connections:\r\n for connection in connections:\r\n self.connect(connection[0], connection[1])", "def update_all_nodes(batchserver_name):\n server,created = getBatchServer(batchserver_name)\n if not pbs_data_nodes.has_key(batchserver_name):\n pbs_data_nodes[batchserver_name] = {'last_update':None, 'nodes':{}}\n\n if pbs_data_nodes[batchserver_name]['last_update'] and (datetime.datetime.now()-pbs_data_nodes[batchserver_name]['last_update']).total_seconds()<GlobalConfiguration.objects.get(pk=1).max_lastupdate:\n logging.debug(\"Nodes info is new enough for server: %s\" % batchserver_name)\n print \"not updated\"\n return pbs_data_nodes\n\n print \"updated\"\n\n conn = pbs.pbs_connect(batchserver_name.encode('iso-8859-1', 'replace'))\n if conn==-1:\n logging.error(\"Cannot connect to %s - live data will be missing\" % server.name)\n return\n statnodes = pbs.pbs_statnode(conn, \"\" , [], \"\")\n pbs.pbs_disconnect(conn)\n\n for sn in statnodes:\n node,created = getNode(sn.name, server)\n attr_dict = dict([ (x.name,x.value) for x in sn.attribs])\n pbs_data_nodes[batchserver_name]['nodes'][node] = update_one_node_from_pbs_data(node, attr_dict)\n pbs_data_nodes[batchserver_name]['last_update'] = datetime.datetime.now()\n\n return pbs_data_nodes", "def _update_dead_nodes(self) -> None:\n with self._dead_node_lock:\n self._dead_nodes = self._seen_nodes - self._running_nodes", "def update_nodes_df(nodes: pandas.DataFrame) -> None:\n nodes_clean = nodes.copy(deep=True)\n # Ensure that all '' values are NaN, so that those rows can be easily removed with dropna()\n nodes_clean.replace('', numpy.nan, inplace=True)\n nodes_clean.dropna(axis=0, how='any', inplace=True)\n nodes_clean.drop_duplicates(keep='first', inplace=True, ignore_index=True)\n\n print('\\nCache used at start of function: ' + str(read_node.cache_info()) + '.')\n print('There are ' + str(len(nodes_clean)) + ' nodes, updating node: 0 ', end='')\n count = 0\n columns = nodes_clean.columns\n for row in nodes_clean.itertuples():\n count += 1\n if count % 250 == 0:\n print(count, ' ', end='', flush=True)\n if count 
% 10000 == 0:\n print('\\n', end='', flush=True)\n\n node_properties = {}\n for prop_name in RICGRAPH_PROPERTIES_ADDITIONAL:\n for other_name in columns:\n if prop_name == other_name:\n node_properties[prop_name] = getattr(row, other_name)\n\n update_node(name=row.name, category=row.category, value=row.value,\n **node_properties)\n\n print(count, '\\n', end='', flush=True)\n print('Cache used at end of function: ' + str(read_node.cache_info()) + '.')\n return", "def remove_nodes_from_cluster(self, nodes, redeploy=True,\n check_services=False):\n self.fuel_web.update_nodes(\n self.cluster_id,\n nodes,\n pending_addition=False, pending_deletion=True,\n )\n if redeploy:\n self.fuel_web.deploy_cluster_wait(self.cluster_id,\n check_services=check_services)" ]
[ "0.5882777", "0.5731162", "0.56259555", "0.5442896", "0.5395769", "0.5358854", "0.5262287", "0.5247419", "0.524426", "0.5210196", "0.51772773", "0.51600504", "0.51503044", "0.5118009", "0.5094011", "0.5081805", "0.50662416", "0.5063596", "0.5050908", "0.50414723", "0.49989244", "0.4982807", "0.49699947", "0.49516693", "0.49359322", "0.4922608", "0.49187964", "0.49126336", "0.4906353", "0.48907092" ]
0.7341229
0
Raise an exception if the environment variable is not set.
def check_envvar(envvar): if not os.environ.get(envvar): raise EnvironmentError("Variable '%s' not set" % envvar)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getenv_check(e):\n res = os.getenv(e)\n if res == None:\n print(e, 'environment variable not set - stopping.')\n exit(1)\n else:\n return res", "def _check_env():\n\tif os.getenv(_DATA_DIRECTORY_ENV_KEY) is None:\n\t\texit_everything(ERROR_DATA_DIRECTORY_NOT_SET, f'{_DATA_DIRECTORY_ENV_KEY} env var not set')\n\t\n\tif os.getenv(_FRONTEND_URL_ENV_KEY) is None:\n\t\texit_everything(ERROR_FRONTEND_NOT_SET, f'{_FRONTEND_URL_ENV_KEY} env var not set')", "def check_environment() -> None:\n for item in ['IB_USER', 'IB_PASSWORD', 'IB_URL']:\n if os.getenv(item) is None:\n raise click.UsageError(f'{item} environment variable must be set before using ib.')", "def verify_environment():\n reqs = ['NAME', 'RECIPIENT', 'SUBJECT', 'MESSAGE',\n 'MAILGUN_API_KEY', 'MAILGUN_DOMAIN']\n for req in reqs:\n if not os.getenv(req):\n logging.error('Environment variable ' + req + ' is not set')\n sys.exit(2)", "def get_env_or_exception(key):\n \n value = os.getenv(key)\n if value is None:\n raise ImproperlyConfigured(f'{key} env variable is not set')\n\n return value", "def getenv_or_raise_exception(varname) -> str:\n\n env = os.getenv(varname)\n if env is None:\n raise EnvironmentError(f\"Environment variable {varname} is not set!\")\n return env", "def check_env():\n if 'CALLBACK_URL' not in os.environ:\n die('Environment variable CALLBACK_URL not set'.format(env))", "def _set(env_var: str) -> bool:\n return os.getenv(env_var) not in [None, \"0\"]", "def get_required_env_variable(var_name):\n try:\n return os.environ[var_name]\n except KeyError:\n error_msg = 'Set the {0} environment variable'.format(var_name)\n raise ImproperlyConfigured(error_msg)", "def get_env_variable(self, var_name, optional=False):\n try:\n return environ[var_name]\n except KeyError:\n if optional:\n return False\n else:\n error_msg = f'Error: You must set the {var_name} environment variable.'\n raise Exception(error_msg)", "def print_env_var_missing(env_var: \"EnvironmentVariable\"):\n _print_error(f\"Environment variable '{env_var.value}' missing\")", "def test_fromEnv_bad5(self):\n TEST_ENVIRON = dict(BASE_ENVIRON)\n TEST_ENVIRON[\"TOR_PT_ORPORT\"] = \"lulz\"\n os.environ = TEST_ENVIRON\n self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)\n self.assertOutputLinesStartWith(\"ENV-ERROR \")", "def _check_env_var_presence_s3_db(env_var_name):\n if os.environ.get(env_var_name) is None:\n logger.info(\"Warning: the {name} environment variable is not set.\\n\"\n \"All tests that access AWS S3 database will fail\\n\".format(\n name=env_var_name))", "def config_env_var_verify():\n with open('skywalking/config.py', 'r') as config_file:\n data = config_file.read().replace('\\n', '')\n for each in OPTIONS.keys():\n if f'_{each.upper()}' not in data:\n raise Exception(f'Environment variable for {each.upper()} is not found in config.py\\n'\n f'This means you have a mismatch of config.py variable and env var name')", "def validate_env(self) -> None:\n errors = []\n\n self.user_name = env.str('USER_NAME')\n if not self.user_name:\n errors.append('USER_NAME environment variable needs to be set to your MyQ user name')\n\n self.password = env.str('PASSWORD')\n if not self.password:\n errors.append('PASSWORD environment variable needs to be set to your MyQ password')\n\n self.left_door = env.int('EDGEWOOD', 0)\n self.right_door = 1 - self.left_door\n\n self.only_close = env.bool('ONLY_CLOSE', True)\n\n if errors:\n raise Exception(','.join(errors))", "def check_environ():\n global _environ_checked\n if _environ_checked:\n return\n\n if 
os.name == 'posix' and 'HOME' not in os.environ:\n import pwd\n os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]\n\n if 'PLAT' not in os.environ:\n os.environ['PLAT'] = _sysconfig.get_platform()\n\n _environ_checked = 1", "def test_fromEnv_bad4(self):\n TEST_ENVIRON = dict(BASE_ENVIRON)\n TEST_ENVIRON[\"TOR_PT_EXTENDED_SERVER_PORT\"] = \"cakez\"\n os.environ = TEST_ENVIRON\n self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)\n self.assertOutputLinesStartWith(\"ENV-ERROR \")", "def cli(ctx: click.Context):\n try:\n # Ensure the necessary environment variables are set before proceeding.\n all(environ[env_var] for env_var in Env.values())\n\n except KeyError as exc:\n ctx.fail(f\"Missing environment variable: {exc}\")", "def test_required(self, missing_param):\n with mock.patch.dict('os.environ', {\n **REQUIRED_SETTINGS,\n missing_param: '',\n }, clear=True), self.assertRaises(ImproperlyConfigured):\n self.reload_settings()", "def test_fromEnv_bad6(self):\n TEST_ENVIRON = dict(BASE_ENVIRON)\n TEST_ENVIRON[\"TOR_PT_SERVER_BINDADDR\"] = \"dummy-lyrical_content,boom-127.0.0.1:6666\"\n os.environ = TEST_ENVIRON\n self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)\n self.assertOutputLinesStartWith(\"ENV-ERROR \")", "def _require_environment():\n require('environment', 'host', provided_by=ENVS.keys())", "def test_no_existing_value(self):\n var_name = \"PICCOLO_TEST_1\"\n\n # Make sure it definitely doesn't exist already\n if os.environ.get(var_name) is not None:\n del os.environ[var_name]\n\n new_value = \"hello world\"\n\n with set_env_var(var_name=var_name, temp_value=new_value):\n self.assertEqual(os.environ.get(var_name), new_value)\n\n self.assertEqual(os.environ.get(var_name), None)", "def test_invalid_envfile(self):\n with pytest.raises(yaenv.EnvError) as err:\n _ = yaenv.Env('/invalidfile')\n assert 'does not exist' in str(err.value)", "def test_environ_vars_available(self) -> None:\n self.assertIsNotNone(os.environ.get('AWS_ACCESS_KEY_ID'))\n self.assertIsNotNone(os.environ.get('AWS_SECRET_KEY'))\n self.assertIsNotNone(os.environ.get('AWS_REGION_NAME'))\n self.assertIsNotNone(os.environ.get('S3_BUCKET'))", "def test_fromEnv_bad9(self):\n TEST_ENVIRON = dict(BASE_ENVIRON)\n TEST_ENVIRON[\"TOR_PT_SERVER_BINDADDR\"] = \"dummy-127.0.0.1:5556\"\n TEST_ENVIRON[\"TOR_PT_SERVER_TRANSPORTS\"] = \"dummy,laughs\"\n os.environ = TEST_ENVIRON\n self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)\n self.assertOutputLinesStartWith(\"ENV-ERROR \")", "def get_value(key:str):\n value = environ.get(key)\n if value == None or len(str(value)) == 0:\n raise ValueError('Missing env: '+key)\n return value", "def test__saxo_environment(self):\n with self.assertRaises(KeyError) as envErr:\n API(environment=\"faulty\", access_token=self.access_token)\n\n self.assertTrue(\"Unknown environment\" in \"{}\".format(envErr.exception))", "def test_fromEnv_bad8(self):\n TEST_ENVIRON = dict(BASE_ENVIRON)\n TEST_ENVIRON[\"TOR_PT_SERVER_BINDADDR\"] = \"dummy-127.0.0.1:5556,laughs-127.0.0.1:6666\"\n TEST_ENVIRON[\"TOR_PT_SERVER_TRANSPORTS\"] = \"dummy\"\n os.environ = TEST_ENVIRON\n self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)\n self.assertOutputLinesStartWith(\"ENV-ERROR \")", "def checkEnvVar(self):\n for path in self.config.options('ENV'):\n if (self.config.get('ENV', path)).startswith('/'):\n print (\"Checking path for \"+path).ljust(65, '.'),\n if not os.path.exists(self.config.get('ENV', path)):\n print \"[ Failed ]\"\n print \"\\n***ERROR: %s not found. 
Check the config file.\" % path\n sys.exit()\n else:\n print \"[ OK ]\"", "def test_fromEnv_bad7(self):\n TEST_ENVIRON = dict(BASE_ENVIRON)\n TEST_ENVIRON[\"TOR_PT_SERVER_BINDADDR\"] = \"dummy-127.0.0.1:5556,laughs-127.0.0.1:6666\"\n TEST_ENVIRON[\"TOR_PT_SERVER_TRANSPORTS\"] = \"dummy,boom\"\n os.environ = TEST_ENVIRON\n self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)\n self.assertOutputLinesStartWith(\"ENV-ERROR \")" ]
[ "0.7693455", "0.75979894", "0.7594372", "0.74920285", "0.74750453", "0.7391544", "0.7008469", "0.69389886", "0.69301397", "0.6909712", "0.685651", "0.6856387", "0.68016106", "0.6705088", "0.6686351", "0.6683706", "0.6681883", "0.6654641", "0.66065437", "0.65902627", "0.65768045", "0.6569711", "0.6535023", "0.65228903", "0.65176004", "0.651588", "0.6492395", "0.6490133", "0.6486825", "0.64381766" ]
0.8079858
0
Load the created JSON files to the S3 bucket.
def upload(jsonfiles): # clear S3 Bucket bucket = S3Bucket() bucket.clear() for jsonfile in jsonfiles: filename = os.path.basename(jsonfile) key = build_key(filename) logging.info("%s %s", filename, key) # store json in S3 object bucket.store(key, jsonfile)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _s3_stash(self):\n s3_url = 's3://{}/{}'.format(BUCKET, self.atom_file)\n bucketpath = BUCKET.strip(\"/\")\n bucketbase = BUCKET.split(\"/\")[0]\n parts = urlparse.urlsplit(s3_url)\n mimetype = 'application/xml' \n \n conn = boto.connect_s3()\n\n try:\n bucket = conn.get_bucket(bucketbase)\n except boto.exception.S3ResponseError:\n bucket = conn.create_bucket(bucketbase)\n self.logger.info(\"Created S3 bucket {}\".format(bucketbase))\n\n if not(bucket.get_key(parts.path)):\n key = bucket.new_key(parts.path)\n key.set_metadata(\"Content-Type\", mimetype)\n key.set_contents_from_filename(self.atom_file)\n msg = \"created {0}\".format(s3_url)\n self.logger.info(msg)\n else:\n key = bucket.get_key(parts.path)\n key.set_metadata(\"Content-Type\", mimetype)\n key.set_contents_from_filename(self.atom_file)\n msg = \"re-uploaded {}\".format(s3_url)\n self.logger.info(msg)", "def load_data_s3(filename):\n \n global s3_client\n\n if s3_client is None:\n logger.debug('Creating new S3 client.')\n s3_client = boto3.client('s3') \n\n try:\n logger.debug('Loading batch to S3.')\n response = s3_client.upload_file('/tmp/'+filename, os.environ['BUCKET_NAME'], str(os.environ['BUCKET_PATH']) \n + '/' + filename)\n\n except Exception as ex:\n logger.error('Exception in loading data to s3 message: {}'.format(ex))\n send_sns_alert(str(ex))\n raise", "def sync_to_bucket(s3_url,\n region='eu-west-1',\n profile_name=None):\n\n parsed_s3_url = urlparse.urlparse(s3_url);\n\n bucket_name = parsed_s3_url.hostname;\n key_prefix = parsed_s3_url.path;\n if key_prefix[0] == '/':\n key_prefix = key_prefix[1:]\n if key_prefix[-1] != '/':\n key_prefix = key_prefix + '/'\n\n def inner(fn_inner):\n \"\"\"\n Decorator function function sent in should be having signature\n func(None,None, XmlDoc) and should yield JSON document one for\n each file that should be persisted to S3\n \"\"\"\n\n def handler(event, context):\n \"\"\"\n The AWS Lambda Entry Point\n \"\"\"\n s3conn = s3.connect_to_region(region, profile_name=profile_name)\n bucket = s3conn.get_bucket(bucket_name)\n\n # Use a map to track keys that are no longer in the feed, used for deletion\n remaining_keys = { key.name : True for key in bucket.list(prefix=key_prefix)}\n\n logger.debug(\"Existing keys in bucket\\n%s\", '\\n'.join(remaining_keys));\n\n for id, json_data in fn_inner():\n key_name = key_prefix + str(uuid.uuid5(uuid.NAMESPACE_URL, id.encode('utf-8')))\n\n # Key found, delete it from cleanup map\n if key_name in remaining_keys:\n del remaining_keys[key_name]\n\n string_data = json.dumps(json_data)\n s3_object = bucket.get_key(key_name)\n if s3_object == None:\n key = bucket.new_key(key_name);\n key.set_contents_from_string(string_data)\n logger.info('Creating:\\ts3://%s/%s', bucket_name, key_name)\n logger.debug(string_data)\n else:\n if s3_object.etag[1:len(s3_object.etag)-1] != s3etag.from_string(string_data):\n logger.info('Updating:\\ts3://%s/%s', bucket_name, key_name)\n logger.debug(string_data)\n s3_object.set_contents_from_string(string_data)\n else:\n logger.info('Same:\\ts3://%s/%s', bucket_name, key_name);\n logger.debug(string_data)\n\n # Remvoe remaining keys from the bucket to allow for cleanup\n for key in remaining_keys:\n logger.info('Removing:\\ts3://%s/%s', bucket_name, key);\n bucket.delete_key(key);\n\n logger.info('Done');\n\n return handler\n\n return inner", "def upload_json_to_s3(directory):\n for f in directory.iterdir():\n if str(f).endswith('.json'):\n full_file_path = str(f.parent) + \"/\" + str(f.name)\n file_name = 
str(f.name)\n s3_client.upload_file(full_file_path, BASE_BUCKET, file_name)", "def load_archives_from_s3(self):\n s3_bucket = S3Backend(self.conf).bucket\n try:\n k = Key(s3_bucket)\n k.key = self.backup_key\n\n return json.loads(k.get_contents_as_string())\n except S3ResponseError, exc:\n log.error(exc)\n return {}", "def save_file_aws(obj, file_path, aws_credentials):\n bucket_engine = S3Bucket(*aws_credentials)\n data = gzip.compress(json.dumps(obj).encode('utf-8'))\n bucket_engine.write(file_path, data)", "def s3(ctx, bucket_name, data_file, region):\n ctx.obj['BUCKET_NAME'] = bucket_name\n ctx.obj['DATA_FILE'] = data_file\n ctx.obj['TYPE'] = 's3'\n ctx.obj['REGION'] = region", "def load_s3_njson(bucket, prefix, key_list, honorary_list):\n # Get list of files in bucket and with prefix:\n s3_file_list = list_s3_files(bucket, prefix)\n \n # Load data from all files:\n structured_data = []\n for s3_file in s3_file_list:\n structured_data = structured_data + s3_file_to_dict_list(bucket, s3_file, key_list, honorary_list)\n \n return structured_data", "def lambda_handler(event, context):\n \n filename = None\n fobj = None\n\n try:\n \n filename = 'dlq' + '-' + datetime.datetime.now().strftime(\"%s\")\n fobj = open('/tmp/'+filename, 'w')\n logger.debug('S3 client set up.')\n\n for record in event['Records']:\n fobj.write(json.dumps(record['body']))\n fobj.write(\"\\n\")\n \n except Exception as ex:\n logger.error('Exception in executing ingestion to S3: {}'.format(ex))\n send_sns_alert(str(ex))\n raise\n\n else:\n \n #Saves file to S3\n fobj.close()\n load_data_s3(filename)\n\n return {\n 'statusCode': 200,\n 'body': json.dumps('Success!')\n }\n\n finally:\n\n # S3 - close temp object\n fobj.close()", "def main(transcribe_bucket_name, mp3_bucket_name):\n\n s3 = boto3.resource('s3')\n for bucket in s3.buckets.all():\n if bucket.name == transcribe_bucket_name:\n for key in bucket.objects.all():\n if key.key.endswith('.json'):\n r = {}\n # Get reference number\n reference = basename(key.key).replace('.json', '')\n r['ref'] = reference\n # Get URL\n location = boto3.client('s3') \\\n .get_bucket_location(\n Bucket=mp3_bucket_name)['LocationConstraint']\n base_url = join('https://s3-%s.amazonaws.com' % location,\n mp3_bucket_name)\n url = join(base_url, key.key.replace('.json', '.mp3'))\n r['url'] = url\n # Download json file\n try:\n s3.Bucket(transcribe_bucket_name) \\\n .download_file(key.key, key.key)\n except Exception as exception:\n return 1\n # Get text\n with open(key.key, 'r') as f:\n data = json.load(f)\n text = data['results']['transcripts'][0]['transcript']\n r['text'] = text\n # Get sentiment\n sentiment = get_sentiment(text)\n r['sentiment'] = sentiment\n # Check promotion\n promo = check_promo(text)\n r['promo'] = promo\n # Save to Gooogle Sheets\n values = [r['ref'], r['text'], r['promo'], r['sentiment'],\n r['url']]\n append_row(values)\n # Remove tmp json file from local machine\n remove(key.key)", "def download_json_metadata_from_s3(bucket_name, prefix=\"\", num_threads=20):\n\n # simple method for threads to pull from a queue and download JSON files\n def download_object(queue):\n while True:\n obj = queue.get()\n if obj is None:\n break\n obj.Object().download_file(obj.key.replace(prefix, ''))\n queue.task_done()\n\n # create a directory to store downloaded metadata\n cwd = Path.cwd()\n data_dir = cwd / 'data'\n json_dir = data_dir / 'json'\n # try:\n os.makedirs(json_dir, exist_ok=True)\n # except FileExistsError:\n # shutil.rmtree(json_dir)\n # os.makedirs(json_dir)\n 
os.chdir(json_dir)\n\n # create a queue for objects that need to be downloaded\n # and spawn threads to download them concurrently\n download_queue = Queue(maxsize=0)\n workers = []\n for worker in range(num_threads):\n worker = Thread(target=download_object, args=(download_queue, ))\n worker.setDaemon(True)\n worker.start()\n workers.append(worker)\n\n # loop through the files in the bucket and filter for JSON metadata\n # files for only labeled images; add them to the queue\n s3 = boto3.resource(\"s3\")\n bucket = s3.Bucket(bucket_name)\n for obj in bucket.objects.filter(Prefix=prefix):\n if obj.key.endswith(\"meta.json\"):\n download_queue.put(obj)\n\n # wait for the queue to be empty, then join all threads\n download_queue.join()\n for _ in range(num_threads):\n download_queue.put(None)\n for worker in workers:\n worker.join()\n\n os.chdir(cwd)", "def pushToS3()-> None:\n logging.info(f\"Connecting to s3 {getTime()}\")\n s3 = boto3.client(\"s3\",endpoint_url=\"http://localhost:4566\")\n if(not s3.head_bucket(Bucket=\"demo\")):\n s3.create_bucket(Bucket='demo')\n try:\n logging.info(f\"Uploading to s3 {getTime()}\")\n s3.upload_file(\"result.csv\",\"demo\",\"result.csv\")\n logging.info(f\"Finished uploding to s3 {getTime()}\")\n except ClientError as e:\n logging.error(f\"Error uploading file to S3 {getTime()}\")", "def load_from_s3(self, bucket, prefix=None):\r\n n = 0\r\n if prefix:\r\n prefix = '%s/' % prefix\r\n else:\r\n prefix = '%s/' % self.id[1:]\r\n rs = bucket.list(prefix=prefix)\r\n for key in rs:\r\n n += 1\r\n m = self.new_message(key.get_contents_as_string())\r\n self.write(m)\r\n return n", "def read_s3_file(date):\n \"\"\" history from S3 \"\"\"\n bucket = os.getenv(\"SPOTIFY_BUCKET_NAME\")\n path = os.getenv(\"SPOTIFY_BUCKET_PATH\")\n s3 = boto3.resource('s3')\n try:\n s3.Object(bucket, \"%s/%s.json\" % (path, date)).load()\n except botocore.exceptions.ClientError as e:\n logger.info(\"No existing history file found for %s, %s\" %\n (date, e.response['Error']['Code']))\n if e.response['Error']['Code'] == '404':\n return []\n else:\n logger.warning(\"Unexpected error code returned!\")\n return []\n else:\n logger.info(\"Reading history file for %s\" % date)\n content_object = s3.Object(bucket, \"%s/%s.json\" % (path, date))\n file_content = content_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def handler(event, context):\n s3conn = s3.connect_to_region(region, profile_name=profile_name)\n bucket = s3conn.get_bucket(bucket_name)\n\n # Use a map to track keys that are no longer in the feed, used for deletion\n remaining_keys = { key.name : True for key in bucket.list(prefix=key_prefix)}\n\n logger.debug(\"Existing keys in bucket\\n%s\", '\\n'.join(remaining_keys));\n\n for id, json_data in fn_inner():\n key_name = key_prefix + str(uuid.uuid5(uuid.NAMESPACE_URL, id.encode('utf-8')))\n\n # Key found, delete it from cleanup map\n if key_name in remaining_keys:\n del remaining_keys[key_name]\n\n string_data = json.dumps(json_data)\n s3_object = bucket.get_key(key_name)\n if s3_object == None:\n key = bucket.new_key(key_name);\n key.set_contents_from_string(string_data)\n logger.info('Creating:\\ts3://%s/%s', bucket_name, key_name)\n logger.debug(string_data)\n else:\n if s3_object.etag[1:len(s3_object.etag)-1] != s3etag.from_string(string_data):\n logger.info('Updating:\\ts3://%s/%s', bucket_name, key_name)\n logger.debug(string_data)\n s3_object.set_contents_from_string(string_data)\n else:\n 
logger.info('Same:\\ts3://%s/%s', bucket_name, key_name);\n logger.debug(string_data)\n\n # Remvoe remaining keys from the bucket to allow for cleanup\n for key in remaining_keys:\n logger.info('Removing:\\ts3://%s/%s', bucket_name, key);\n bucket.delete_key(key);\n\n logger.info('Done');", "def upload_data_to_s3(data: dict, bucket_name: str, object_key: str) -> None:\n uploader = S3Uploader(bucket_name)\n with tempfile.NamedTemporaryFile(mode=\"w+\") as local_file:\n json.dump(data, local_file, cls=FancyJsonEncoder, indent=\" \", sort_keys=True)\n local_file.write(\"\\n\")\n local_file.flush()\n uploader(local_file.name, object_key)", "def _upload_s3(self, filename, bucket, objectKey):\n return s3_client.upload_file(filename, bucket, objectKey)", "def setup_buckets():\n s3 = boto.connect_s3()\n s3.create_bucket('mls_data.mls.angerilli.ca')", "def write_s3_file(data, date):\n logger.info(\"Writing history file to S3.\")\n bucket = os.getenv(\"SPOTIFY_BUCKET_NAME\")\n path = os.getenv(\"SPOTIFY_BUCKET_PATH\")\n s3 = boto3.client('s3')\n data = json.dumps(data)\n s3.put_object(Bucket=bucket, Key=\"%s/%s.json\" % (path, date), Body=data)", "def get_amazon_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n amzn_filename = \"AMZN.json\"\n complete_path = os.path.join(prefix, amzn_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def s3_load(self, bucket_name, file_name, region):\n cursor = self.conn.cursor()\n\n table_name = file_name.split(\".\")[0]\n\n cursor.execute(\"\"\"SELECT aws_s3.table_import_from_s3(\\\n %s, '', '(format csv)', \n %s, %s, %s\n );\n \"\"\", (table_name, bucket_name, file_name, region))\n \n print (\"Loading data ...\")\n self.conn.commit()\n return", "def get_gzipped_s3_objects_from_dict(session, event):\n return get_s3_objects_from_dict(\n session, event, default_unzip_s3_object_handler_function\n )", "def convert_to_json(basepath, sendto):\n\n logger = logging.getLogger('WikiLog')\n\n k = bucket.new_key(basepath)\n\n filenames = []\n year = month = day = hrs = ''\n\n for key in bucket.list():\n thisfile = key.name.encode('utf-8')\n if 'projectviews' not in thisfile and 'sql' not in thisfile and '.gz' in thisfile and thisfile.startswith(basepath):\n # S3 key name is of the format kt-wiki/pageviews/2016/2016-06/pageviews-20160601-000000.gz\n # Split by / to get last element\n filenames.append(thisfile)\n logger.info(\"Processing file: {}\".format(thisfile))\n fname = thisfile.split('/')\n\n # Get content from filename and save to local\n # Split again to Grab year, month, day, hour value from filename\n key.get_contents_to_filename('/home/ubuntu/WikiView/data/' + fname[-1])\n fname1 = fname[-1]\n data_time = fname1[:-3].split('-')\n year, month, day, hrs = data_time[1][:4], data_time[1][4:6], data_time[1][-2:], data_time[-1]\n\n docname = 'pageviews-' + year + '-' + month + '-' + day + '-' + hrs + '.json'\n dictlist = []\n\n # save file from s3 to local, read, write to json, push json to s3\n with open(docname, 'w') as fp:\n #\n with gzip.open('/home/ubuntu/WikiView/data/'+fname[-1],'r') as fin:\n for line in fin:\n line = line.split(' ')\n doc = {}\n doc['ymdh'] = year + '-' + month + '-' + day + '-' + hrs\n try:\n # format: project, title, views, bytes ~ en Main_Page 242332 4737756101\n prj, title, vcount = line[0], line[1], line[2]\n doc['prj'] = prj\n doc['title'] = title\n doc['vcount'] = vcount\n 
json.dump(doc,fp)\n fp.write('\\n')\n except:\n logger.error('Error reading gzip file {} at line: {}'.format(thisfile, line))\n pass\n# sys.exc_clear()\n\n # Now, save the json file to \n key_name = 'pageviews-' + year + '-' + month + '-' + day + '-' + hrs + '.json'\n full_key_name = os.path.join(sendto, key_name)\n k = bucket.new_key(full_key_name)\n\n logger.info(\"Sending json file to S3: {}\".format(docname))\n k.set_contents_from_filename(key_name)\n\n # Remove temp file\n logger.info(\"Removing temp file: {} {}\".format('/home/ubuntu/WikiView/data/', fname[-1]))\n os.remove('/home/ubuntu/WikiView/data/'+fname[-1])\n logger.info(\"Removing temp file: {}\".format(key_name))\n os.remove(key_name)\n logger.info('Finished!!!')", "def write_to_s3(df, bucket, path):\n pass", "def upload_files_s3(files, bucket):\n \n print('************************************')\n print('Uploading files to s3 bucket...')\n print('************************************')\n \n for i in range(len(files)):\n upload_file_s3(files[i], bucket)\n \n print('************************************')\n print('Upload complete')\n print('************************************')", "def data_pull_s3(self):\n year = self.month_year[0]\n month = self.month_year[1]\n self.s3 = boto3.resource('s3',aws_access_key_id=self.creds_data['key_id'],\n aws_secret_access_key=self.creds_data['key_access'])\n bucket = self.s3.Bucket('himatdata')\n home = os.getcwd()\n file_path = os.path.join(*[home, 'Trmm/', self.output_folder, year + '_' + month])\n print(file_path)\n if not os.path.exists(file_path):\n os.makedirs(file_path)\n for obj in bucket.objects.filter(Delimiter='', Prefix='Trmm/{}{}_{}'.format(self.output_folder, year, month)):\n if obj.key.endswith('.nc4'):\n bucket.download_file(obj.key,os.path.join(os.path.join(home, obj.key)))\n logging.info(\"Done with Year Month: %s\", month_year)", "def collect_s3(self):\n print('Collecting artifacts matching %s from S3 bucket %s' % (self.match, s3_bucket))\n self.s3 = boto3.resource('s3')\n self.s3_bucket = self.s3.Bucket(s3_bucket)\n self.s3_client = boto3.client('s3')\n for item in self.s3_client.list_objects(Bucket=s3_bucket, Prefix='librdkafka/').get('Contents'):\n self.collect_single(item.get('Key'))\n\n for a in self.artifacts:\n a.download()", "def s3_store_data(self):\n\n USERHOMEDIR = os.path.expanduser('~')\n TESTFILEPATH = \"%s/3MBFILE\" % USERHOMEDIR\n if not os.path.exists(TESTFILEPATH):\n with open(TESTFILEPATH, \"wb\") as out:\n out.truncate(1024 * 1024 * 3)\n self.k.set_contents_from_filename(TESTFILEPATH)", "def lambda_handler(event, context):\n\n for record in event['Records']:\n\n bucket = record['s3']['bucket']['name']\n key = unquote_plus(record['s3']['object']['key'])\n\n str_value = s3_utils.download_file_as_string(bucket, key)\n data = json.loads(str_value)\n\n normalized_data = {\n 'meta': {\n 'table': 'parcels',\n 'column_names': [\n 'dataset',\n 'as_of',\n 'apn',\n 'objectid',\n 'city',\n 'x_coordinate',\n 'y_coordinate',\n 'area',\n 'length'\n ]\n }\n }\n\n rows = []\n\n dataset = data['meta']['dataset']\n as_of = data['meta']['datetime']\n\n for r in data['results']:\n\n attr = r['attributes']\n\n temp_dict = {\n 'dataset': dataset,\n 'as_of': as_of,\n 'apn': attr.get('APN_SPACE'),\n 'objectid': attr.get('OBJECTID'),\n 'city': attr.get('CITY'),\n 'x_coordinate': attr.get('X'),\n 'y_coordinate': attr.get('Y'),\n 'area': attr.get('Shape.STArea()'),\n 'length': attr.get('Shape.STLength()')\n }\n\n rows.append(temp_dict)\n\n normalized_data['rows'] = rows\n \n bucket = 
'gis-data-normalized'\n file_name = 'normalized_' + key\n s3_utils.upload_json_as_file(normalized_data, bucket, file_name)", "def put_data_in_s3(data, filename):\n s3_client.put_object(Body=data, Bucket=CLIMATE_DATA_BUCKET, Key=filename)" ]
[ "0.6985121", "0.69683963", "0.6922932", "0.6861065", "0.685234", "0.66070056", "0.65758073", "0.6551769", "0.6550309", "0.65369016", "0.64492005", "0.6376597", "0.6289341", "0.6281992", "0.6274749", "0.62634486", "0.6242343", "0.6241593", "0.62215537", "0.6221003", "0.6214576", "0.61882484", "0.6175024", "0.6171795", "0.61608255", "0.61605936", "0.6145137", "0.61305356", "0.61212057", "0.6101711" ]
0.7573898
0
Convert the code to a list of positions. E.g. "12" returns [1,2]
def codeToPositions(self, positions_code): # Pull out positions from the code requested_positions = [] for i in range(len(positions_code)): this_position = int(positions_code[i]) requested_positions.append(this_position) return requested_positions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_pos(self, pos):\r\n\r\n column = ord(pos[0]) - 97\r\n if len(pos) == 2:\r\n row = ord(pos[1]) - 49\r\n else:\r\n row = 9\r\n return [row, column]", "def toPosition(self, pos):\n return [ord(pos[0])-ord('a'), int(pos[1])]", "def toPosition(self, pos):\n return [ord(pos[0])-ord('a'), int(pos[1])]", "def _code_indices(self) -> Tuple[int, ...]:\n return tuple(idx for idx, seg in enumerate(self.segments) if seg.is_code)", "def extract_positions(lines):\n positions = []\n for line in lines:\n position = [int(s) for s in re.findall(r'-?\\d+', line)]\n positions.append(position)\n return positions", "def populate_code_list():\n\tletter_code_ST = \"JZIHGFEDCBA\"\n\tletter_code_FG = \"XWUTRQPNMLK\"\n\tfor pos in range(\n\t len(letter_code_ST)): #Interestingly, the values start from 0\n\t\tcode_ST.append(pos) # Number first\n\t\tcode_ST.append(letter_code_ST[pos])\n\tfor pos in range(len(letter_code_FG)):\n\t\tcode_FG.append(pos)\n\t\tcode_FG.append(letter_code_FG[pos])", "def intcode_parse(code):\n actual_code = code % 100\n parameter_piece = code - actual_code\n parameter_piece = parameter_piece // 100\n parameter_code_list = []\n\n while parameter_piece > 0:\n parameter_code_list.append(parameter_piece % 10)\n parameter_piece = parameter_piece // 10\n\n return (actual_code, parameter_code_list)", "def parse_positions(self, start_pos, end_pos):\r\n\r\n start_column = ord(start_pos[0]) - 97\r\n if len(start_pos) == 2:\r\n start_row = ord(start_pos[1]) - 49\r\n else:\r\n start_row = 9\r\n end_column = ord(end_pos[0]) - 97\r\n if len(end_pos) == 2:\r\n end_row = ord(end_pos[1]) - 49\r\n else:\r\n end_row = 9\r\n return [start_row, start_column, end_row, end_column]", "def convert2int(self,seq_pep):\n\t\treturn [self.aminoacids.index(pep) for pep in seq_pep]", "def get_pcode_list(self) -> List[str]:\n return self.pcodes", "def get_location_codes(scanner, input):\n matches = scanner.search_places(input)\n codes = []\n for i in matches[\"Places\"]:\n codes.append(i[\"PlaceId\"])\n return codes", "def _get_indexes(self):\n\n code_indexes = []\n for match in self.parser.ansi_regex.finditer(self._raw_string):\n code_indexes.extend(list(range(match.start(), match.end())))\n if not code_indexes:\n # Plain string, no ANSI codes.\n return code_indexes, list(range(0, len(self._raw_string)))\n # all indexes not occupied by ansi codes are normal characters\n char_indexes = [i for i in range(len(self._raw_string)) if i not in code_indexes]\n return code_indexes, char_indexes", "def seq_positions(seq, codon):\n\n positions = []\n i = 0\n\n while codon in seq[i:]:\n pos = seq.find(codon, i)\n positions.append(pos)\n i = pos + 1\n positions.sort()\n return positions", "def position(self):\n return self._pos.to_list()", "def code_to_sequences( self, ucode ):\n\t\t\n\t\tassert isinstance( ucode, unicode ), 'ucode must be unicode string!' 
\n\t\t\n\t\tfor uchar in ucode:\n\t\t\tif not( uchar in self._char39 ):\n\t\t\t\traise Barcode39Error( '%s char is not listed in Barcode39 characters [0..9,A..Z,space,9,-,.,$,/,+,%]' )\n\n\t\tresult = []\n\t\tfor uchar in ucode:\n\t\t\tresult = result + self.char_to_seq(uchar) \n\t\t\t\n\t\treturn result", "def convert_code_to_decimal(processed_code):\n converted_digits = []\n\n for index, digit in enumerate(processed_code):\n if not digit.isnumeric():\n digit = HEXADECIMAL_TO_DECIMAL[digit]\n converted_digits.append(int(digit))\n\n return converted_digits", "def codepoint_ords(self):\n pass", "def get_position(self):\n return list(self.position)", "def loc_to_coord(codes):\n def adfilter(codes):\n return re.findall(\"\"\"[a-zA-Z]+, [A-Z]{2}\"\"\", \";\".join(codes))\n\n api_key = \"AIzaSyCxQCjOrHFAf7T-W3vtUYqWkgSFkvMjxN4\"\n\n g = geocoders.GoogleV3(api_key = api_key)\n coords = {\"lat\":[], \"long\":[]}\n for code in adfilter(codes):\n if code != \"\":\n try:\n place = g.geocode(code)\n if place != None:\n coords[\"lat\"].append(place.latitude)\n coords[\"long\"].append(place.longitude)\n except (exc.GeocoderTimedOut, exc.GeocoderQueryError):\n pass\n return coords", "def get_character_position(character: dict) -> list:\r\n return character['Position']", "def getPosition(self):\n\t\txxx1 = self.stokes()\n\t\txxx2 = self.thp()\n\t\txxx3 = self.tthp()\n\t\treturn [xxx1, xxx2, xxx3]", "def get_position(pos):\n if type(pos) is str:\n return list(map(lambda x: float(x),pos.split(\",\")))\n return pos", "def get_values(self, code_block):\r\n pos_mode, imm_mode = 0, 1\r\n x, values = 1, []\r\n modes = self.get_modes(code_block)\r\n for mode in modes:\r\n if mode == pos_mode:\r\n values.append(int(self.codes[code_block[x]]))\r\n elif mode == imm_mode:\r\n values.append(int(code_block[x]))\r\n else: print('Error: Not a valid mode.')\r\n x += 1\r\n print('Get values: ')\r\n print(values)\r\n return values", "def mask_to_positions(maskstring):\r\n return nonzero(array(map(int, maskstring)))[0]", "def coordinates(self):\n logging.debug('Get coordinates from text')\n result = []\n blocks = self.del_comm(blocks=True)\n coor = re.compile('[FXYZ][+-]?[0-9]+(\\.[0-9]+)?')\n for line in blocks:\n coord_line = False\n comm = line.split()\n temp = []\n for c in comm:\n if c == 'G1':\n coord_line = True\n if coord_line and coor.match(c):\n temp.append(c)\n if temp:\n result.append(temp)\n return result", "def mask_to_positions(maskstring):\n return nonzero(array(map(int, maskstring)))[0]", "def to_list(bits: int) -> list[Position]:\n positions = []\n for r in range(8):\n for c in range(8):\n mask = pos_mask(r, c)\n if bits & mask > 0:\n positions.append(Position(r, c))\n return positions", "def opcode_list(self, script):\n opcodes = []\n new_pc = 0\n try:\n for opcode, data, pc, new_pc in self.get_opcodes(script):\n opcodes.append(self.disassemble_for_opcode_data(opcode, data))\n except ScriptError:\n opcodes.append(binascii.hexlify(script[new_pc:]).decode(\"utf8\"))\n\n return opcodes", "def positions_to_play(self):\r\n\r\n positions = []\r\n\r\n for i in range(0, len(self.matrix)):\r\n for j in range(0, len(self.matrix[i])):\r\n if self.matrix[i][j] == \"0\":\r\n # Add [row, column] to the list\r\n positions.append([i, j])\r\n \r\n return positions", "def research_pos(self, map_list, character): \n list_pos = []\n for y in range(15): \n for x, c in enumerate(map_list[y]):\n if character in c and c == character:\n list_pos.append((x*50, y*50)) \n return list_pos" ]
[ "0.6843231", "0.6796919", "0.6796919", "0.6607168", "0.65984535", "0.6562776", "0.6543961", "0.644775", "0.6356751", "0.6204404", "0.61959976", "0.60942286", "0.6035776", "0.5963504", "0.5939585", "0.59067464", "0.59013224", "0.5897137", "0.58854824", "0.588048", "0.5863592", "0.58533657", "0.583964", "0.58380246", "0.583775", "0.58221394", "0.58131665", "0.5799637", "0.57698417", "0.5724185" ]
0.76894224
0
Convert the state to a code. E.g. state 1 could be "01", depending on the servo setup
def stateToCode(self, state):
    multiplier = 1
    code = ""
    for i in range(self.num_joints-1, -1, -1):
        num_angles = len(self.angles[i])
        code += str(int((state / multiplier) % num_angles))
        multiplier *= len(self.angles[i])
    # Return the reversed code
    return code[::-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_state_code(self, data) -> int:\n return int(self.state)", "def state_to_string(board_state):\n return str(board_state)", "def _status_to_state(status):\n if status == 'failed':\n return Finding.State.ACTIVE\n elif status == 'passed' or status == 'skipped':\n return Finding.State.INACTIVE\n else:\n return Finding.State.STATE_UNSPECIFIED", "def transformNumberToDNA(inputState):\n if inputState == 0:\n result = \"A\"\n elif inputState == 1:\n result = \"C\"\n elif inputState == 2:\n result = \"G\"\n elif inputState == 3:\n result = \"T\"\n else:\n raise ValueError(\"The input state is not valid as 0,1,2 or 3\") \n return result", "def state_encod_arch2(self, state, action):", "def status_to_event_code(status: str):\n return {\n \"sent\": \"txSent\",\n \"pending\": \"txPool\",\n \"pending-simulation\": \"txPoolSimulation\",\n \"stuck\": \"txStuck\",\n \"confirmed\": \"txConfirmed\",\n \"failed\": \"txFailed\",\n \"speedup\": \"txSpeedUp\",\n \"cancel\": \"txCancel\",\n \"dropped\": \"txDropped\",\n }[status]", "def get_new_state():\n state = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in xrange(32))\n return state", "def _get_state(self):\n print(\"GET STATE\")\n res = self._send_command(\n \"RS;\",\n fb_required=True,\n res_pattern=\"STATE:\")\n # The received answer is supposed to be something like\n # STATE:0|1|-1\n state = int(res.split(':')[1])\n if state == PVDriver.IDLE:\n return \"IDLE\"\n elif state == PVDriver.MOVING:\n return \"MOVING\"\n else:\n return \"ERROR\"", "def evalState(state):\n if state == PyTango.DevState.RUNNING:\n return State.Moving\n elif state == PyTango.DevState.STANDBY:\n return State.On\n else:\n return from_tango_state_to_state(state)", "def state_transl(state):\n nonlocal state_cnt\n nonlocal state_transl_dict\n\n if state not in state_transl_dict.keys():\n state_transl_dict[state] = state_cnt\n state_cnt += 1\n\n return str(state_transl_dict[state])", "def state_transl(state):\n nonlocal state_cnt\n nonlocal state_transl_dict\n\n if state not in state_transl_dict.keys():\n state_transl_dict[state] = state_cnt\n state_cnt += 1\n\n return str(state_transl_dict[state])", "def to_cwl_state_string(state: 'JobState') -> str:\n state_to_cwl_string = {\n JobState.SUBMITTED: 'Waiting',\n JobState.STAGING_IN: 'Waiting',\n JobState.WAITING: 'Running',\n JobState.RUNNING: 'Running',\n JobState.FINISHED: 'Running',\n JobState.STAGING_OUT: 'Running',\n JobState.SUCCESS: 'Success',\n JobState.STAGING_IN_CR: 'Waiting',\n JobState.WAITING_CR: 'Running',\n JobState.RUNNING_CR: 'Running',\n JobState.STAGING_OUT_CR: 'Running',\n JobState.CANCELLED: 'Cancelled',\n JobState.SYSTEM_ERROR: 'SystemError',\n JobState.TEMPORARY_FAILURE: 'TemporaryFailure',\n JobState.PERMANENT_FAILURE: 'PermanentFailure',\n }\n return state_to_cwl_string[state]", "def get_state(self, state):\n status = [u'noState', u'poweredOn', u'blocked', u'suspended', \n u'poweredOff', u'poweredOff', u'crashed']\n return status[int(state)]", "def state(self):\n if self._state is None:\n return None\n\n if self._sensor_type in [ATTR_CYCLE1_START, ATTR_CYCLE2_START, ATTR_TIME]:\n if self._state[0] == 255:\n return \"Disabled\"\n return '{:02d}:{:02d}'.format(self._state[0], self._state[1])\n elif self._sensor_type == ATTR_STATUS:\n return STATUS_CHOICES[self._state]\n\n return self._state", "def new_state():\n return ''.join(random.choice(string.ascii_uppercase + string.digits)\n for x in range(32))", "def native_value(self) -> str:\n if isinstance(self._state, Enum):\n return 
self._state.name.lower()\n return self._state.lower()", "def _convert_axis_state_format_ps90(self, axis_state_OWIS: int) -> int:\n if axis_state_OWIS in [0, 1, 2]:\n return 0\n elif axis_state_OWIS == 3:\n return 1\n else:\n return 0", "def code(action_sequence):\r\n # refuse any invalid action :\r\n if set(action_sequence) - set(CODE_MAP): # some action was not in the known ones.\r\n return '0' # per spec (test_unknown_action)\r\n\r\n mapdict = dict(zip(CODE_MAP, (1, 2, 4, 8)))\r\n da_code = [mapdict[action] for action in action_sequence]\r\n if sorted(da_code) != da_code: # list is not sorted : assume reversed\r\n da_code.append(16)\r\n return format(sum(da_code), 'b') # sum to integer, binstring, and return", "def encode(self, game_state: ssm.SnakeStateMachine) -> int:\n state = [e.encode(game_state) for e in self._encoders]\n return self._state2id[tuple(state)]", "def genStatesCode(self):\n for s, d in self.info['machine'].items():\n transition_code_arr = []\n for t in self.info['transitionnames']:\n tcode = self.tmpl['transition_none'].render(transition=t)\n if t in d['transitions'].keys():\n tcode = self.tmpl['transition_ok'].render(transition=t, post=d['transitions'][t])\n transition_code_arr.append(tcode)\n transition_code = '\\n'.join(transition_code_arr) + '\\n'\n state_code = self.tmpl['concreatestate'].render(state=s.title(), description=d['description'])\n self.codeinfo[s.lower()+\".py\"] = '\\n'.join([state_code, transition_code]) + '\\n'", "def state_to_char(observation):\n if observation == self.TileState.CLEAN.value:\n return \"-\"\n if observation == self.TileState.DIRTY.value:\n return \"d\"\n if observation == self.TileState.BOT.value:\n return \"b\"", "def msgCode(self):\n return self._MsgCodeDict[self.state]", "def state(self, state: str) -> None:", "def canonical_ctrl_state(ctrl_state, num_qubits):\n if not num_qubits:\n return ''\n\n if isinstance(ctrl_state, CtrlAll):\n if ctrl_state == CtrlAll.One:\n return '1' * num_qubits\n return '0' * num_qubits\n\n if isinstance(ctrl_state, int):\n # If the user inputs an integer, convert it to binary bit string\n converted_str = f'{ctrl_state:b}'.zfill(num_qubits)[::-1]\n if len(converted_str) != num_qubits:\n raise ValueError(\n f'Control state specified as {ctrl_state} ({converted_str}) is higher than maximum for {num_qubits} '\n f'qubits: {2 ** num_qubits - 1}'\n )\n return converted_str\n\n if isinstance(ctrl_state, str):\n # If the user inputs bit string, directly use it\n if len(ctrl_state) != num_qubits:\n raise ValueError(\n f'Control state {ctrl_state} has different length than the number of control qubits {num_qubits}'\n )\n if not set(ctrl_state).issubset({'0', '1'}):\n raise ValueError(f'Control state {ctrl_state} has string other than 1 and 0')\n return ctrl_state\n\n raise TypeError('Input must be a string, an integer or an enum value of class State')", "def _get_state(self):\n start = self.design.first_unassigned_site\n return self.target.padded_encoding[\n start : start + 2 * self._env_config.state_radius + 1\n ]", "def get_state(self, state):\n return state", "def state(self) -> str:", "def get_lookup_state(self, state):\n return \"\".join(map(str, state))", "def get_board_state(self):\n\n board_state = ''\n for i in range(0, 3):\n board_state += ''.join([self.board['{}{}'.format(i, j)] for j in range(0, 3)])\n return board_state", "def _to_code(rgb):\n code = 0\n if 'r' in rgb:\n code += 1\n\n if 'g' in rgb:\n code += 2\n\n if 'b' in rgb:\n code += 4\n return code" ]
[ "0.7099449", "0.64658976", "0.6341494", "0.630235", "0.6236514", "0.61942995", "0.61780244", "0.61468345", "0.6120422", "0.6093492", "0.6093492", "0.6088677", "0.6056049", "0.6005784", "0.59579587", "0.5882401", "0.5843271", "0.58395547", "0.58130515", "0.58092207", "0.5807223", "0.57959116", "0.57713217", "0.576515", "0.5739544", "0.57086706", "0.5661216", "0.5647613", "0.5631623", "0.5623672" ]
0.74284816
0
Generates a sample of measures, to be fed into the neural net.
sample_size is the number of distinct images of measures
multiplicity is the number of times that measure should be augmented, so the actual sample size is sample_size * multiplicity
sample_name is the name of the folder in which the sample data is deposited
height and width are the heights and widths of the images of measures
for the keys, see the files generate_rhtyhm and generate_chords
rest_prob indicates the frequency of rests vs notes
the sampler randomly selects from the given key and measure number choices
def generate_sample(sample_size, multiplicity, sample_name, height, width,
                    treble_tp_key, bass_tp_key, treble_cp_key, bass_cp_key, rest_prob,
                    measure_length_choices=(8, 12, 16), key_number_choices=tuple(range(-7, 8))):
    t = time.time()
    if not os.path.exists(sample_name + '/'):
        os.mkdir(sample_name + '/')
    # the neural net needs images of measures, pseudocode for those images, and the key and time signatures of those measures
    pc_data = []
    images = []
    measure_lengths = []
    key_numbers = []
    for i in range(sample_size):
        print(f'{i}/{sample_size} time: {time.time() - t} seconds')
        # pick random key and time signatures
        measure_length = np.random.choice(measure_length_choices)
        key_number = np.random.choice(key_number_choices)
        # generate the xml for a measure
        soup = generate_measure(measure_length, key_number, rest_prob,
                                treble_tp_key, bass_tp_key, treble_cp_key, bass_cp_key)
        # write the xml to a file
        with open('temp.musicxml', 'w+') as f:
            f.write(str(soup))
        # call musescore to convert the xml to a png and and svg
        subprocess.call(['mscore', '-S', str(pathlib.Path.home()) + '/Documents/MuseScore2/Styles/custom_style.mss',
                         'temp.musicxml', '-o', 'temp.mscx'])
        subprocess.call(['mscore', 'temp.mscx', '-o', 'temp.png'])
        subprocess.call(['mscore', 'temp.mscx', '-o', 'temp.svg'])
        png_path = 'temp-1.png'
        svg_path = 'temp-1.svg'
        # create multiple copies of the measure, with the image distorted in different ways
        for _ in range(multiplicity):
            # use the png and svg to appropriately crop the image
            image = crop(png_path, svg_path)
            # distort the image
            image = random_augmentation(image, height, width)
            # make sure the image is encoded as an integer, to save space
            image = (image*255).astype(np.uint8)
            # add the image data to the lists
            images.append(image)
            measure_lengths.append(measure_length)
            key_numbers.append(key_number)
        # record the pseucode, instead of the xml
        # this line has to go down here since xml_to_pc changes soup in place
        pc = ['<START>'] + xml_to_pc(soup) + ['<END>']
        for _ in range(multiplicity):
            pc_data.append(pc)
        # remove the files used in constructing the sample item
        os.remove('temp.musicxml')
        os.remove('temp.mscx')
        os.remove('temp-1.png')
        os.remove('temp-1.svg')
    # after generating the sample, save it to a folder
    with open(sample_name + '/' + 'pc_data.json', 'w+') as f:
        json.dump(pc_data, f)
    measure_lengths = np.array(measure_lengths)
    key_numbers = np.array(key_numbers)
    images = np.array(images)
    np.save(sample_name + '/' + 'measure_lengths.npy', measure_lengths)
    np.save(sample_name + '/' + 'key_numbers.npy', key_numbers)
    np.save(sample_name + '/' + 'images.npy', images)
    time_elapsed = time.time() - t
    # save info about the sample to a text file
    info = {'sample_name': sample_name, 'sample_size': sample_size, 'multiplicity': multiplicity,
            'treble_tp_key': treble_tp_key, 'bass_tp_key': bass_tp_key,
            'treble_cp_key': treble_cp_key, 'bass_cp_key': bass_cp_key, 'rest_prob': rest_prob,
            'measure_length_choices': list(measure_length_choices),
            'key_number_choices': list(key_number_choices), 'time_elapsed': time_elapsed}
    with open(sample_name + '/' + 'info.json', 'w+') as f:
        json.dump(info, f)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_samples(self, n_samples):", "def generate_samples(self, n_samples):", "def sample(self, size=1):\n raise NotImplementedError", "def sample(self, size=1):\n raise NotImplementedError", "def sample(self, size=1):\n raise NotImplementedError", "def sample(self, size=1):\n raise NotImplementedError", "def sample(self, size=1):\n pass", "def sample(self, sample_size: int):\n self.data = random.sample(self.data, sample_size)", "def sample(self, batch_size):\n raise NotImplementedError", "def sample(self, sample_size: int = 20):\n return self._adapter.sample(sample_size)", "def sampleDocuments(sample_size = 250):\n\n\tdocuments_csv_filepath = getScriptDirectory() + \"/result/documents.csv\"\n\tsampled_documents_filepath = getScriptDirectory() + \"/result/sampled_documents.csv\"\n\tsample_statistics_filepath = getScriptDirectory() + \"/result/statistics.csv\"\n\n\t# Check if the sample already exists\n\tif(os.path.isfile(sampled_documents_filepath)):\n\t\tprint \"Sample already exists. Moving on to getting metadata.\"\n\n\t\treturn\n\n\t# Check if result folder has been made\n\tresult_folder = getScriptDirectory() + \"/result\"\n\n\tif not os.path.exists(result_folder):\n\t\tos.makedirs(result_folder)\n\n\n\t# Read in the existing documents\n\tdocuments = pandas.read_csv(documents_csv_filepath)\n\n\tunique_mns = pandas.unique(documents['authoritativeMN'])\n\tsampled_documents = pandas.DataFrame({'identifier' : [], 'authoritativeMN' : []})\n\n\tfor mn in unique_mns:\n\t\tdf_subset = documents[documents.authoritativeMN == mn]\n\t\tnrows = df_subset.shape[0]\n\n\t\tprint(\" Member node \" + mn + \" has \" + str(nrows) + \" rows\")\n\n\t\tif nrows is 0:\n\t\t\tcontinue\n\t\telif nrows is 1:\n\t\t\tsampled_rows = [0]\n\t\telse:\n\t\t\tif nrows > sample_size:\n\t\t\t\trows_to_sample = range(0, nrows)\n\t\t\t\tsampled_rows = numpy.random.choice(rows_to_sample, sample_size)\n\t\t\telse:\n\t\t\t\tsampled_rows = range(0, nrows)\n\n\t\tdf_subset_filtered = df_subset.iloc[sampled_rows,:]\n\n\t\tsampled_documents = pandas.concat([sampled_documents, df_subset_filtered])\n\n\tsampled_documents.groupby([\"authoritativeMN\"]).aggregate(['count']).to_csv(sample_statistics_filepath, encoding = \"utf-8\")\n\tsampled_documents.to_csv(sampled_documents_filepath, index = False, encoding = \"utf-8\")\n\n\treturn", "def sample(self, sample_size):\n m = self.a.shape[0]\n ss = min(m, sample_size)\n indices = np.arange(m)\n random_indices = _random_batch(indices, ss)\n return _build_prediction_Report(self.name, self.x, self.y, self.a, random_indices)", "def sample(self, num_samples, **kwargs):\n pass", "def random_sample(input_name):\n\t#Count number of lines in original file\n\twith open(input_name) as f:\n\t\told_size = len(f.readlines())\n\t#Determine number of lines for new file\n\tnew_size=int(round(sum(1 for row in open(input_name))* args.rnd_sample))\n\t#Create name for sub-sampled file\n\tSampledFileName, SampledExten = os.path.splitext(input_name)\n\tSampledName = '%s_smpld%s' % (SampledFileName,SampledExten)\n\t#Randomly select the desired number of lines and print to new file\n\twith open(SampledName,\"wb\") as sink:\n\t\tfor i in random.sample(range(0, old_size), new_size):\n\t\t\tsink.write(linecache.getline(input_name, i))\n\tlinecache.clearcache()", "def generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):\n for sample_idx, sample_size in zip(range(num_samples), cycle(test_set_sizes)):\n yield random.sample(test_universe, sample_size)", "def Sampling(self, path, number):\n 
allfiles = os.listdir(path)\n for image_name in allfiles:\n number_label = image_name.split('.')[0].split('_')[0]\n self.label_file_map[number_label].append(os.path.join(path, image_name))\n \n # 将样本均匀随机抽样切割成训练集合和测试集合\n training_set = collections.defaultdict(list)\n testing_set = collections.defaultdict(list)\n for label in self.label_file_map:\n file_list = self.label_file_map[label]\n training_set[label] = [file_list[random.randint(0,len(file_list)-1)] for i in range(number)] \n testing_set[label] = set(file_list) - set(training_set[label])\n\n train_x, train_y = self._generate_data_label_pair(len(training_set)*number, 68*68, training_set)\n test_total_num = 0\n for elt in testing_set:\n test_total_num += len(testing_set[elt])\n test_x, test_y = self._generate_data_label_pair(test_total_num, 68*68, testing_set)\n return (train_x, train_y, test_x, test_y)", "def test_sample_niw_size(self):\n size = (1, 2, 3)\n mu, Sigma = niw.sample_niw(self.params, size)\n self.assertTrue(mu.shape == size + (self.n, ) and Sigma.shape == size + (self.n, self.n))", "def sample(self, *size):\n if len(size) == 1 and isinstance(size[0], Sequence):\n size = size[0]\n size = list(size) + [self.dim]\n\n sample = torch.randn(size, device=self.mu.device) * self.sigma2.sqrt() + self.mu\n return sample", "def sample(self, sample_size: int):\n raise NotImplemetedError(\"AtomisticDataset.sample is not implemeneted.\")\n\n # self.data = random.sample(self.data, sample_size)", "def generate_samples(self, nsamples):\n assert self.trained, \"Model must first be fitted to some data.\"\n logger.debug(f'Generate synthetic dataset of size {nsamples}')\n synthetic_data, _ = self.gm.sample(nsamples)\n return synthetic_data", "def create_samples(self):\n for s_id in range(len(self.data[\"sample\"])):\n self.samples.add(Sample(s_id, [self.data[key][s_id] for key in self.data.keys() if key not in WRONG_KEYS],\n self.data[\"label\"][s_id]))", "def make_map(self, sampling=None, size=None):\n if sampling is None and self.sampling is None:\n self.sampling = self.minimum_sampling()\n elif sampling is not None:\n self.sampling = sampling\n \n if size is None and self.size is None:\n self.size = self.minimum_size()\n elif size is not None:\n self.size = size\n\n # Set the number of pixels that spans the full size requested\n pixsize = numpy.ceil(self.size/self.sampling).astype(int)\n # Force the pixel size to be odd\n if pixsize % 2 == 0:\n pixsize += 1\n # Adjust the size to be an integer number of pixels\n _size = pixsize*self.sampling\n if _size - self.size > 0.1*self.sampling:\n warnings.warn('Size reset to an integer number of pixels: '\n ' {0} -> {1} arcsec'.format(self.size, _size))\n self.size = _size\n self.y = (pixsize-1)*numpy.linspace(-0.5,0.5,pixsize)*self.sampling\n self.x = self.y.copy()[::-1]\n\n # Sample it\n self.X, self.Y = numpy.meshgrid(self.x, self.y)\n self.data = self.__call__(self.X, self.Y)", "def create_materials(endpoint):\n for phenotype in get_phenotypes(endpoint):\n print(phenotype)\n # for now, creating the sample name combining studyDbId and potDbId -\n # eventually this should be observationUnitDbId\n sample_name = phenotype['studyDbId']+\"_\"+phenotype['plotNumber']\n this_sample = Sample(name=sample_name)\n that_source = Source(phenotype['germplasmName'], phenotype['germplasmDbId'])\n this_sample.derives_from = that_source", "def sample(self, batchsize, *args, **kwargs):\n raise NotImplementedError", "def _generate(self, **kwargs):\n N = self.parameter_schema['N']\n parameter_count = 
len(self._parameter_names)\n common_override_kwargs = {}\n override_kwargs = self._sampler_overrides(common_override_kwargs)\n if kwargs:\n kwargs.update(override_kwargs)\n else:\n kwargs = override_kwargs\n __import__(\"SALib.sample\", fromlist=[self.sampler_class])\n sampler = getattr(SALib.sample, self.sampler_class)\n problem = self.parameter_schema[\"problem\"]\n self._samples = sampler.sample(problem, N, **kwargs)\n self._samples = numpy.unique(self._samples, axis=0)\n super()._generate()", "def test(batch_size=1, num_sample=16):\n return paddle.batch(_read_creater(num_sample=num_sample), batch_size)", "def size_rand_sample(size):\n\n assert size > 0\n @sinks\n def _dagpype_internal_fn_act(target):\n i = 0\n sample = None\n try:\n while True:\n e = (yield)\n sample = [e] * size if i == 0 else [e if random.randint(0, i) == 0 else ee for ee in sample]\n i += 1\n except GeneratorExit:\n if sample is not None:\n target.send(sample)\n target.close()\n\n return _dagpype_internal_fn_act", "def get_disc_data_sets(sample_size):\n log.info(\"MAKING DATA SETS\")\n\n fakes = generate_fakes(int(sample_size / 2))\n reals = get_reals(int(sample_size / 2))\n total = numpy.concatenate((reals, fakes))\n real_labels = numpy.ones([len(reals)])\n fake_labels = numpy.zeros([len(fakes)])\n total_labels = numpy.concatenate((real_labels, fake_labels))\n return get_discriminator_splits(total, total_labels)", "def create_samples(self, skills_sample_fraction=1.0, users_sample_fraction=1.0):\n # Sampling\n self.sample_skills_to_be_covered(skills_sample_fraction)\n self.sample_users(users_sample_fraction)", "def test_sample(system_generator):\n\n name, test = system_generator()\n print(name)\n\n w_F, w_R, N_k = test.sample([10, 8], mode=\"wFwR\")\n w_F, w_R, N_k = test.sample([1, 1], mode=\"wFwR\")\n w_F, w_R, N_k = test.sample([10, 0], mode=\"wFwR\")\n w_F, w_R, N_k = test.sample([0, 5], mode=\"wFwR\")" ]
[ "0.64130974", "0.64130974", "0.6090398", "0.6090398", "0.6090398", "0.6090398", "0.6069997", "0.60332435", "0.59508437", "0.59355867", "0.58917224", "0.5861272", "0.58172125", "0.5764265", "0.57515836", "0.5744843", "0.5720441", "0.5713661", "0.5708281", "0.5684648", "0.56315875", "0.5623373", "0.56140447", "0.5602784", "0.55907893", "0.5587064", "0.55843157", "0.5558725", "0.55350196", "0.55258334" ]
0.78432107
0
Create a GeneticInstance from the GUI parameters
def createGeneticInstance(self, context):
    genetic_instance = GeneticInstance(ParametricLSystem(self.seed))
    fromBlenderToGeneticInstance(self, genetic_instance)
    return genetic_instance
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, params, fitness, population_size=20, generations=20, temperature_factor=0.9):\n self.params = params\n self.fitness = fitness\n self.population_size = population_size\n self.generations = generations\n self.temperature_factor = temperature_factor\n\n self.population = []", "def __init__(self, genotype):\n\n\t\t# instantiate the root\n\t\ttk.Tk.__init__(self)\n\t\tself.title(\"CPPN playground\")\n\n\t\t# create container to hold all grames in the GUI\n\t\tcontainer = tk.Frame(self)\n\t\tcontainer.pack(side=\"top\", fill=\"both\", expand=True)\n\t\tcontainer.grid_rowconfigure(0, weight=1)\n\t\tcontainer.grid_columnconfigure(0, weight=1)\n\n\t\t# initiate GUI\n\n\t\t# add menu bar for the separate Tk frame\n\t\tmenubar = tk.Menu(self)\n\t\tfilemenu = tk.Menu(menubar)\n\t\tfilemenu.add_command(label=\"Main Page\", command=lambda: self.raise_frame(\"MainPage\", container))\n\t\tfilemenu.add_command(label=\"Slider Page\", command=lambda: self.raise_frame(\"SliderPage\", container))\n\t\tfilemenu.add_command(label=\"Save\", command=lambda: save_gen_GUI(genotype))\n\t\tmenubar.add_cascade(label=\"Options\", menu=filemenu)\n\t\tself.config(menu=menubar)\n\n\t\t# add frames to the main GUI\n\t\tself.frames = {}\n\n\t\t# create main frame\n\t\tframe1 = MainPage(container=container, master=self, genotype=genotype)\n\t\tself.frames[\"MainPage\"] = frame1\n\t\tframe1.grid(row=0, column=0, stick=\"nsew\")\n\n\t\t# raise main page to the front initially\n\t\tself.raise_frame(\"MainPage\", container)", "def __init__(self, _populationSize, _chromosomeClass):\n # a generation is a collection of chromosomes stored in a priority queue\n # which is ordered by fitness\n self.generation = PriorityQueue()\n # store how many chromosomes are in each generation\n self.populationSize = _populationSize\n # store a template for generating chromosomes\n self.chromosomeClass = _chromosomeClass\n # choose a random starting population\n self.randomPopulation()", "def __init__(self, ggui):\n self.gui: gamegui = ggui\n super().__init__(self.gui.top, \"New Game\")", "def generate(gui):\n\n global robot_obj\n global ftm_list\n global btm_list\n global fk_list\n global jac_list\n global com_list\n global com_jac_list\n\n ftm = ftm_list if gui.checkBox_ftm.isChecked() else []\n btm = btm_list if gui.checkBox_btm.isChecked() else []\n fk = fk_list if gui.checkBox_fk.isChecked() else []\n jac = jac_list if gui.checkBox_jac.isChecked() else []\n com = com_list if gui.checkBox_com.isChecked() else []\n com_jac = com_jac_list if gui.checkBox_com_jac.isChecked() else []\n\n language = Language(settings[\"language\"])\n optimization_level = settings[\"optimization_level\"]\n\n generate_everything(robot_obj, ftm, btm,\n fk, jac, com, com_jac,\n polynomial_trajectories,\n control_loops_list,\n optimization_level,\n language,\n path + '../generated/' + settings[\"filename\"],\n progressbar=gui.progressBar)", "def create_test_instance(cls, **kwargs):\n # create a instance with random parameters\n obj = super(LibraryBinaryNumeric, cls).create_test_instance(**kwargs)\n # choose an optimal interaction matrix\n obj.choose_sensitivity_matrix('auto')\n return obj", "def __init__(self, configs, simulator, wait_time=3):\n self.configs = configs\n self.sim = simulator.sim\n self.gripper = VREP_Gripper()\n self.open()", "def New(*args, **kargs):\n obj = itkRandomVariateGeneratorBase.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def new(self):\n self.__buttons.setDisabled(False)\n 
self.__service = None\n self.name.setFocus()\n self.name.setText(\"\")\n self.threadable.setChecked(False)\n self.min_cores.setValue(100)\n self.max_cores.setValue(100)\n self.min_memory.setValue(3276)\n self.min_gpu_memory.setValue(self.gpu_min_mb)\n self.timeout.setValue(0)\n self.timeout_llu.setValue(0)\n self.min_memory_increase.setValue(2048)\n self._tags_w.set_tags(['general'])", "def create_individual(self):\n self.genes = np.random.rand(self.chromosome_size)\n self.personal_best = self.genes.copy", "def __init__(self, grid, estimator, parameter_search, **kwargs):\n self.kwargs = kwargs\n self.grid = grid\n self.estimator = estimator\n self.parameter_search = parameter_search", "def __init__(self):\n import arcgisscripting as ARC\n self.GP = ARC.create(9.3)\n self.params = self.GP.getparameterinfo()", "def __init__(self,\n learning_rate=0.001,\n beta1=0.9,\n use_locking=False,\n name=\"GGT\",\n window=10,\n eps=1e-4,\n svd_eps=1e-6,\n sigma_eps=1e-2):\n super(GGTOptimizer, self).__init__(use_locking, name)\n self._set_hyper(\"lr\", learning_rate)\n self._set_hyper(\"beta1\", beta1)\n self._set_hyper(\"window\", window)\n self._set_hyper(\"eps\", eps)\n self._set_hyper(\"svd_eps\", svd_eps)\n self._set_hyper(\"sigma_eps\", sigma_eps)\n\n self.index_dict = {}\n self.shape_dict = {}", "def help_create(self):\n print(\"create instances\")", "def create_custom():\n # Extract initialisation parameters\n alpha = request.args.get('alpha')\n alpha = float(alpha)\n generations = request.args.get('generations')\n generations = int(generations)\n beta = request.args.get('beta')\n beta = float(beta)\n pec = request.args.get('pec')\n pec = float(pec)\n q = request.args.get('q')\n q = float(q)\n\n # Extract the custom coordinates and create a list of nodes\n coords = request.args.get('custom_coords')\n coords = str(coords)\n nodes = custom_nodes(coords)\n\n # Initialise instance\n i = Instance(nodes, alpha, beta, pec, q)\n\n return jsonify(nodes=i.nodes, alpha=i.alpha, beta=i.beta, decay=i.decay,\n min_pheromone=i.min_pheromone, q=i.q,\n local_deposit=i.local_deposit, distances=i.distances,\n pheromones=i.pheromones, ants=i.ants, shortest_path=i.shortest_path,\n min_distance=i.min_distance, message=\"Instance Initialised\")", "def create_instance(\n self, base_config: dict, labels: dict, wait_for_operation: bool = True\n ) -> Tuple[dict, str]:\n return", "def mainGA(NAME, target_output, target_image): \n global toolbox\n\n print(\"Target image: {0} Target output: {1}\".format(target_image, target_output)) \n sys.stdout.flush()\n\n model = load_model(NAME) \n fit = Fitness(NAME, model, target_image, target_output)\n\n #Genetic operators \n toolbox.register(\"evaluate\", fit.evaluate)\n toolbox.register(\"mate\", cxTwoPointCopy) \n #toolbox.register(\"mate\", cxUniform)\n toolbox.register(\"mutate\", tools.mutGaussian, mu=0.0, sigma=0.1, indpb=0.05)\n toolbox.register(\"select\", tools.selTournament, tournsize=3)\n \n\n pop = toolbox.population(n=50)\n hof = tools.HallOfFame(1, similar=np.array_equal)\n \n #stats = tools.Statistics(lambda ind: ind.fitness.values)\n #stats.register(\"avg\", np.mean)\n #stats.register(\"std\", np.std)\n #stats.register(\"min\", np.min)\n #stats.register(\"max\", np.max)\n \n pop, log = algorithms.eaSimple(pop, toolbox, cxpb=CXPB, mutpb=MUTPB, \n ngen=NGEN, halloffame=hof, \n verbose=False)\n\n return hof[0]", "def __init__(self, params):\r\n _params = {'max_e_value': 1e-3,\r\n 'seqs_per_blast_run': 1000,\r\n 'Similarity': 0.75,\r\n 'min_aligned_percent': 0.50,\r\n 
'blast_program': 'blastx',\r\n 'is_protein': True}\r\n _params.update(params)\r\n OtuPicker.__init__(self, _params)", "def __init__(self,estimator, param = None):\n self.estimator=estimator", "def launch_GUV_GUI(self):\n for i in self.parameters['selected_series']:\n print(f\"Analysing series {i}\")\n self.stack.bundle_axes = 'yx'\n finderparams = ParameterList(filename=self.parameters['filename'],\n channel=self.parameters['channel'],\n intensity_channel=self.parameters['intensity_channel'],\n pixel_microns=self.parameters['pixel_microns'])\n if self.has_multiple_series:\n self.stack.default_coords['v'] = i\n finderparams.series = i\n GUV_Control(self.stack, finderparams) # launch the GUI that can find GUVs and let the user remove them\n \n self.quit()", "def __init__(self,grib,config):\r\n self.grib_file_path = grib\r\n self.member_name = ModelData.get_member_name_from_path(grib)\r\n if self.member_name not in ModelData.member_names:\r\n ModelData.member_names.append(self.member_name)\r\n self.config = config\r\n ModelData.instances.append(self)\r\n return", "def initGui(self):\n from p4_view import Gui\n self.updateStatus(\"Launching GUI...\")\n self.gui = Gui(self, self.lmap)\n self.gui.setStart(self.cfg[\"START\"])\n self.gui.setGoal(self.cfg[\"GOAL\"])\n self.gui.setPossGoals(self.cfg[\"POSS_GOALS\"])\n #GHD\n self.gui.setMapName(self.cfg[\"MAP_FILE\"])\n self.updateStatus(\"OK\")\n self.gui.mainloop()", "def __init__(self, chromo = None, generation = None):\n\t\tsuper(self.__class__, self).__init__(chromo, generation)\n\n\t\t# Subclass-specific member vars\n\t\tself.image = None\n\t\tself.genesAdd = 0\n\t\tself.genesRem = 0\n\t\tself.imported = False # Flag individuals imported from another process", "def create_individual(self):\n self.genes = np.random.rand(self.chromosome_size)", "def __init__(self,name,speed,depth_of_view,view_angle,x_coor = \"\",y_coor = \"\"):\n self.name = name\n self.speed = speed # That will the instantenous speed of the robot\n self.depth_of_view = depth_of_view # That will the instantenous depth of view of the robot\n self.view_angle = view_angle # That will the instantenous view angle of the robot\n self.type = \"Robot\" #Specift the object type\n self.x = x_coor # store the position of the robot\n self.y = y_coor # store the position of the robot\n self.kind = name #Store its kind to give the GUI", "def __init__(self, params):\r\n _params = {'max_e_value': 1e-10,\r\n 'seqs_per_blast_run': 1000,\r\n 'Similarity': 0.97,\r\n 'min_aligned_percent': 0.50,\r\n 'blast_program': 'blastn',\r\n 'is_protein': False}\r\n _params.update(params)\r\n OtuPicker.__init__(self, _params)", "def __init__(self, height=20, width=20,\n initial_victims=100,\n initial_aggressors=5,\n initial_policepersons=5,\n police_letality=0.5,\n prob_victims_have_gun=0.2,\n reaction_if_has_gun=0.85,\n chance_death_gun=0.85):\n super().__init__()\n # Set parameters\n self.height = height\n self.width = width\n self.initial_victims = initial_victims\n self.initial_aggressors = initial_aggressors\n self.initial_policepersons = initial_policepersons\n\n self.prob_victims_have_gun = prob_victims_have_gun\n self.reaction_if_has_gun = reaction_if_has_gun\n self.chance_death_gun = chance_death_gun\n\n self.police_letality = police_letality\n\n self.schedule = RandomActivationByBreed(self)\n self.grid = MultiGrid(self.height, self.width, torus=True)\n # It is here that we provide data for the DataCollector. 
It will be then retrieved from key \"Wolves\" within\n # ChartModule at server.py and sent ver to as a canvas element\n self.datacollector = DataCollector(\n {\"Aggressors\": lambda m: m.schedule.get_breed_count(Aggressor),\n \"Victims\": lambda m: m.schedule.get_breed_count(Victim),\n \"Policepersons\": lambda m: m.schedule.get_breed_count(Police)})\n\n # Create victims:\n for i in range(self.initial_victims):\n x = self.random.randrange(self.width)\n y = self.random.randrange(self.height)\n has_gun = True if self.random.random() < self.prob_victims_have_gun else False\n victim = Victim(self.next_id(), (x, y), self, True, has_gun)\n self.grid.place_agent(victim, (x, y))\n self.schedule.add(victim)\n\n # Create police:\n for i in range(self.initial_policepersons):\n x = self.random.randrange(self.width)\n y = self.random.randrange(self.height)\n has_gun = True\n bobby = Police(self.next_id(), (x, y), self, True, has_gun)\n self.grid.place_agent(bobby, (x, y))\n self.schedule.add(bobby)\n\n # Create aggressors\n for i in range(self.initial_aggressors):\n x = self.random.randrange(self.width)\n y = self.random.randrange(self.height)\n has_gun = True\n aggressor = Aggressor(self.next_id(), (x, y), self, True, has_gun)\n self.grid.place_agent(aggressor, (x, y))\n self.schedule.add(aggressor)\n\n self.running = True\n self.datacollector.collect(self)", "def make_instance(scene, features, params, instances, direction,\n focus_measures, classification, weight):\n evaluator = action_feature_evaluator(focus_measures, scene.step_count)\n\n # Randomly select only a subset of instances based on their weight.\n if params.outlierHandling == OutlierHandling.SAMPLING:\n if random.random() <= weight * params.uniformSamplingRate:\n instance = ( [ evaluator(feature) for _, feature in features ], \n classification )\n instances.append(instance)\n elif params.outlierHandling == OutlierHandling.WEIGHTING:\n if random.random() <= params.uniformSamplingRate:\n instance = ( [ evaluator(feature) for _, feature in features ], \n classification, weight )\n instances.append(instance)\n else:\n assert False", "def __init__(self, gui, path, *args):\n pass", "def _new_instance(self):\n return self.__class__(self._vmodule)" ]
[ "0.5936202", "0.5764224", "0.5752127", "0.57048696", "0.56787145", "0.5638419", "0.5532541", "0.55181456", "0.54791015", "0.54660714", "0.5462964", "0.54509676", "0.5433748", "0.54269344", "0.5425451", "0.5401366", "0.5400063", "0.539114", "0.53665835", "0.53568083", "0.5352749", "0.53459567", "0.5338196", "0.53292716", "0.5316385", "0.5314392", "0.52880144", "0.5285412", "0.52768886", "0.52393615" ]
0.7237641
0
Construct a 95% confidence ellipse using PCA.
def ci95_ellipse(data, type="pop"):
    # Build and fit PCA model
    pca = PCA()
    pca.fit(data)
    coeff = pca.components_
    score = pca.transform(data)
    eigvals = pca.explained_variance_

    # Calculate rotation angle
    phi = math.atan2(coeff[0, 1], coeff[0, 0])

    # This angle is between -pi and pi.
    # Let's shift it such that the angle is between 0 and 2pi
    if phi < 0:
        phi += 2 * math.pi

    # Get the coordinates of the data mean
    n = len(data)
    m = np.mean(data, axis=0)
    x0 = m[0]
    y0 = m[1]

    # Get the 95% confidence interval error ellipse
    # inverse of the chi-square cumulative distribution for p = 0.05 & 2 d.f. = 5.9915
    chisquare_val = 5.9915
    if type is "pop":
        a = math.sqrt(chisquare_val * eigvals[0])
        b = math.sqrt(chisquare_val * eigvals[1])
    elif type is "mean":
        a = math.sqrt(chisquare_val * eigvals[0] / n)
        b = math.sqrt(chisquare_val * eigvals[1] / n)
    else:
        raise ValueError("type has to be 'pop' or 'mean'.")

    # the ellipse in x and y coordinates
    theta_grid = np.linspace(0, 2 * math.pi, num=100)
    ellipse_x_r = a * np.cos(theta_grid)
    ellipse_y_r = b * np.sin(theta_grid)

    # Define a rotation matrix
    R = np.array([[np.cos(phi), np.sin(phi)], [-np.sin(phi), np.cos(phi)]])

    # let's rotate the ellipse to some angle phi
    r_ellipse = np.dot(np.vstack((ellipse_x_r, ellipse_y_r)).T, R)

    # Draw the error ellipse
    x = r_ellipse[:, 0] + x0
    y = r_ellipse[:, 1] + y0
    ellipse = np.stack((x, y), axis=1)

    outside = []
    for i in range(len(score)):
        metric = (score[i, 0] / a) ** 2 + (score[i, 1] / b) ** 2
        if metric > 1:
            outside.append(1)
        else:
            outside.append(0)

    return ellipse, outside
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pca(data):\n mean = data.sum(axis=0) / data.shape[0]\n # show_image(mean)\n cv_matrix = np.cov(data.T)\n e_values, e_vectors = la.eig(cv_matrix)\n return e_values, e_vectors.T, mean", "def pca(X = Math.array([]), no_dims = 50):\n\n print \"Preprocessing the data using PCA...\"\n (n, d) = X.shape;\n X = X - Math.tile(Math.mean(X, 0), (n, 1));\n (l, M) = Math.linalg.eig(Math.dot(X.T, X));\n Y = Math.dot(X, M[:,0:no_dims]);\n return Y;", "def pca(x):\n\t\n\tx = (x - x.mean(axis = 0)) # Subtract the mean of column i from column i, in order to center the matrix.\n\t\n\tnum_observations, num_dimensions = x.shape\n\t\n\t# Often, we have a large number of dimensions (say, 10,000) but a relatively small number of observations (say, 75). In this case, instead of directly computing the eigenvectors of x^T x (a 10,000 x 10,000 matrix), it's more efficient to compute the eigenvectors of x x^T and translate these into the eigenvectors of x^T x by using the transpose trick. \n\t# The transpose trick says that if v is an eigenvector of M^T M, then Mv is an eigenvector of MM^T.\n\t# We arbitrarily select \"100\" as the switching threshold. Another approach is to switch by comparing num_observations and num_dimensions.\n\tif num_dimensions > 100:\n\t\teigenvalues, eigenvectors = linalg.eigh(dot(x, x.T))\n\t\tv = (dot(x.T, eigenvectors).T)[::-1] # Unscaled, but the relative order is still correct.\n\t\ts = sqrt(eigenvalues)[::-1] # Unscaled, but the relative order is still correct.\n\telse:\n\t\tu, s, v = linalg.svd(x, full_matrices = False)\n\t\t\n\treturn v, s", "def getPCA(data):\n #covM = np.cov(data.T) #note that np.cov define row as variables, col as observations\n #corM = np.corrcoef(data.T) # we will use correlation matrix instead of cov.\n covM = np.cov(data.T)\n eigvalue,eigvector = np.linalg.eig(covM) # each col of the eigvector matrix corresponds to one eigenvalue. So, each col is the coeff of one component\n pca = np.dot(data,eigvector) # each col is one pca, each row is one obs in that pca. 
\n return eigvalue,eigvector,pca", "def confidence_ellipse(x, y, ax, n_std=3.0, facecolor='none', **kwargs):\n if x.size != y.size:\n raise ValueError(\"x and y must be the same size\")\n\n cov = np.cov(x, y)\n pearson = cov[0, 1]/np.sqrt(cov[0, 0] * cov[1, 1])\n # Using a special case to obtain the eigenvalues of this\n # two-dimensionl dataset.\n ell_radius_x = np.sqrt(1 + pearson)\n ell_radius_y = np.sqrt(1 - pearson)\n ellipse = patches.Ellipse((0, 0), width=ell_radius_x * 2, height=ell_radius_y * 2,\n facecolor=facecolor, **kwargs)\n\n # Calculating the stdandard deviation of x from\n # the squareroot of the variance and multiplying\n # with the given number of standard deviations.\n scale_x = np.sqrt(cov[0, 0]) * n_std\n mean_x = np.mean(x)\n\n # calculating the stdandard deviation of y ...\n scale_y = np.sqrt(cov[1, 1]) * n_std\n mean_y = np.mean(y)\n\n transf = transforms.Affine2D() \\\n .rotate_deg(45) \\\n .scale(scale_x, scale_y) \\\n .translate(mean_x, mean_y)\n\n ellipse.set_transform(transf + ax.transData)\n return ax.add_patch(ellipse)", "def optimize_pca(X,Y):\n # {0, 10, 20, ..., 590} \n for n in range(0,599,10):\n \n #Fit PCA\n pca = PCA(n_components=n).fit(X)\n # Plot variance\n pylab.scatter(n, sum(pca.explained_variance_ratio_))\n \n #Place 95% line.\n pylab.axhline(y=0.95, color='r')", "def confidence_ellipse(x, y, ax, n_std=3.0, facecolor='none', **kwargs):\n if x.size != y.size:\n raise ValueError(\"x and y must be the same size\")\n\n cov = np.cov(x, y)\n pearson = cov[0, 1]/np.sqrt(cov[0, 0] * cov[1, 1])\n # Using a special case to obtain the eigenvalues of this\n # two-dimensionl dataset.\n ell_radius_x = np.sqrt(1 + pearson)\n ell_radius_y = np.sqrt(1 - pearson)\n ellipse = mpl.patches.Ellipse(\n (0, 0),\n width=ell_radius_x * 2,\n height=ell_radius_y * 2,\n facecolor=facecolor, **kwargs\n )\n\n # Calculating the stdandard deviation of x from\n # the squareroot of the variance and multiplying\n # with the given number of standard deviations.\n scale_x = np.sqrt(cov[0, 0]) * n_std\n mean_x = np.mean(x)\n\n # calculating the stdandard deviation of y ...\n scale_y = np.sqrt(cov[1, 1]) * n_std\n mean_y = np.mean(y)\n\n transf = transforms.Affine2D() \\\n .rotate_deg(45) \\\n .scale(scale_x, scale_y) \\\n .translate(mean_x, mean_y)\n\n ax.plot([mean_x], [mean_y], '*',\n color='yellow', markersize=20,\n markeredgecolor='black')\n\n ellipse.set_transform(transf + ax.transData)\n return ax.add_patch(ellipse)", "def doPCA(self):\n data = [l.points for l in self.preprocessedLandmarks]\n data.append(data[0])\n\n S = np.cov(np.transpose(data))\n\n eigenvalues, eigenvectors = np.linalg.eig(S)\n sorted_values = np.flip(eigenvalues.argsort(), 0)[:self.pcaComponents]\n\n self.eigenvalues = eigenvalues[sorted_values]\n self.eigenvectors = eigenvectors[:, sorted_values]\n # print(self.eigenvalues)\n return self", "def confidence_ellipse(x, y, ax, n_std=3.0, facecolor='none', **kwargs):\n if x.size != y.size:\n raise ValueError(\"x and y must be the same size\")\n\n cov = np.cov(x, y)\n pearson = cov[0, 1]/np.sqrt(cov[0, 0] * cov[1, 1])\n # Using a special case to obtain the eigenvalues of this\n # two-dimensionl dataset.\n ell_radius_x = np.sqrt(1 + pearson)\n ell_radius_y = np.sqrt(1 - pearson)\n ellipse = Ellipse((0, 0), width=ell_radius_x * 2, height=ell_radius_y * 2,\n facecolor=facecolor, **kwargs)\n\n # Calculating the stdandard deviation of x from\n # the squareroot of the variance and multiplying\n # with the given number of standard deviations.\n scale_x = np.sqrt(cov[0, 
0]) * n_std\n mean_x = np.mean(x)\n\n # calculating the stdandard deviation of y ...\n scale_y = np.sqrt(cov[1, 1]) * n_std\n mean_y = np.mean(y)\n\n transf = transforms.Affine2D() \\\n .rotate_deg(45) \\\n .scale(scale_x, scale_y) \\\n .translate(mean_x, mean_y)\n\n ellipse.set_transform(transf + ax.transData)\n return ax.add_patch(ellipse)", "def confidence_ellipse(x, y, ax, n_std=3.0, facecolor='none', **kwargs):\n if x.size != y.size:\n raise ValueError(\"x and y must be the same size\")\n\n cov = np.cov(x, y)\n pearson = cov[0, 1]/np.sqrt(cov[0, 0] * cov[1, 1])\n # Using a special case to obtain the eigenvalues of this\n # two-dimensionl dataset.\n ell_radius_x = np.sqrt(1 + pearson)\n ell_radius_y = np.sqrt(1 - pearson)\n ellipse = Ellipse((0, 0), width=ell_radius_x * 2, height=ell_radius_y * 2,\n facecolor=facecolor, **kwargs)\n\n # Calculating the stdandard deviation of x from\n # the squareroot of the variance and multiplying\n # with the given number of standard deviations.\n scale_x = np.sqrt(cov[0, 0]) * n_std\n mean_x = np.mean(x)\n\n # calculating the stdandard deviation of y ...\n scale_y = np.sqrt(cov[1, 1]) * n_std\n mean_y = np.mean(y)\n\n transf = transforms.Affine2D() \\\n .rotate_deg(45) \\\n .scale(scale_x, scale_y) \\\n .translate(mean_x, mean_y)\n\n ellipse.set_transform(transf + ax.transData)\n return ax.add_patch(ellipse)", "def confidence_ellipse(x, y, ax, n_std=3.0, facecolor='none', **kwargs):\n if x.size != y.size:\n raise ValueError(\"x and y must be the same size\")\n\n cov = np.cov(x, y)\n pearson = cov[0, 1]/np.sqrt(cov[0, 0] * cov[1, 1])\n # Using a special case to obtain the eigenvalues of this\n # two-dimensionl dataset.\n ell_radius_x = np.sqrt(1 + pearson)\n ell_radius_y = np.sqrt(1 - pearson)\n ellipse = Ellipse((0, 0), width=ell_radius_x * 2, height=ell_radius_y * 2,\n facecolor=facecolor, **kwargs)\n\n # Calculating the stdandard deviation of x from\n # the squareroot of the variance and multiplying\n # with the given number of standard deviations.\n scale_x = np.sqrt(cov[0, 0]) * n_std\n mean_x = np.mean(x)\n\n # calculating the stdandard deviation of y ...\n scale_y = np.sqrt(cov[1, 1]) * n_std\n mean_y = np.mean(y)\n\n transf = transforms.Affine2D() \\\n .rotate_deg(45) \\\n .scale(scale_x, scale_y) \\\n .translate(mean_x, mean_y)\n\n ellipse.set_transform(transf + ax.transData)\n return ax.add_patch(ellipse)", "def pca(X, k):\n n, dim = X.shape\n\n # Center the data\n X_mean = np.mean(X, axis = 0)\n X = X - X_mean\n # Get the covariance matrix\n covariance_matrix = np.dot(X.T, X) / (n - 1)\n eigval, eigvec = eigs(covariance_matrix, k)\n return np.array(eigvec), np.array(eigval)", "def pca(self):\n self.pca_mean = self.X.mean(axis=1)\n X_meanC = self.X - self.pca_mean[:, None]\n (self.pca_U, self.pca_S, self.pca_V) = np.linalg.svd(X_meanC, full_matrices=False)\n self.pc_weights = np.dot(np.diag(self.pca_S), self.pca_V)\n self.pc_stdevs = np.std(self.pc_weights, axis=1)", "def pca(X, k = 30):\n \n # Center/scale the data.\n s = np.std(X, axis=0)\n s = np.where(s==0, 1, s)\n X = (X - np.mean(X, axis=0))/s\n \n # Run PCA with sklearn.\n pca_ = PCA(n_components=k)\n return pca_.fit_transform(X)", "def do_pca(x_data, n_class):\n\n run_pca = decomposition.PCA(n_components = n_class)\n pca_fit = run_pca.fit(x_data)\n #pca_fit\n x_pca = run_pca.transform(x_data);\n #pca_cov = run_pca.get_covariance(x_pca)\n #pca_score = run_pca.score(x_data)\n pca_noise = pca_fit.noise_variance_\n pca_var_explained = pca_fit.explained_variance_ratio_\n\n return x_pca, 
pca_noise, pca_var_explained", "def get_pca():\n from sklearn.decomposition import PCA\n return PCA()", "def princomp(A):\n # computing eigenvalues and eigenvectors of covariance matrix\n M = (A-np.mean(A.T,axis=1)).T # subtract the mean (along columns)\n [latent,coeff] = np.linalg.eig(np.cov(M)) # attention:not always sorted\n score = np.dot(coeff.T,M) # projection of the data in the new space\n return coeff,score,latent", "def performpca(df, nb_pc=5):\n # Remove uncomplete series\n print(df.shape)\n normalized=(df-df.mean())/df.std()\n # normalized.plot()\n # plt.show()\n pca = PCA(nb_pc)\n pca.fit(normalized)\n return pca, normalized", "def pca():\n pca = PCA()\n\n data = pca.fit_transform([[22,23,24],[23,84,12],[22,74,54],[22,23,24],[22,84,12],[22,74,54],[22,23,24],[22,84,12],[22,74,54]])\n\n print(data)", "def PCA(X, dims_rescaled_data=21):\n # pca = decomposition.PCA(n_components=3)\n # x_std = StandardScaler().fit_transform(X)\n # a = pca.fit_transform(x_std)\n\n R = np.cov(X, rowvar=False)\n evals, evecs = scipy.linalg.eigh(R)\n idx = np.argsort(evals)[::-1]\n evecs = evecs[:,idx]\n\n evals = evals[idx]\n evecs = evecs[:, :dims_rescaled_data]\n\n newX = np.dot(evecs.T, X.T).T\n\n return newX #, evals, evecs", "def pca(X, ndim):\n X_m = X - np.mean(X, axis=0)\n u, s, vh = np.linalg.svd(X_m)\n # traditional notation decomp(A) = U (sigma) VT = (u * s) @ vh\n W = vh[0:ndim].T\n # X_m = X - np.mean(X, axis=0)\n return np.matmul(X_m, W)", "def kernelpca(X, n_comp):\n estimator = decomposition.KernelPCA(n_components = n_comp, kernel = 'rbf')\n estimator.fit(X)\n X_proj = estimator.transform(X)\n return estimator.components_, X_proj,", "def pca(self, colour_by='stroke') -> None:\n data = self.data[['age', 'avg_glucose_level', 'bmi']]\n pca = PCA()\n pc = pca.fit_transform(data)\n pc = pd.DataFrame(pc, index=data.index)\n data = pd.concat([self.data, pc], axis=1)\n\n if isinstance(colour_by, str):\n n_colours = len(data[colour_by].unique())\n elif isinstance(colour_by, (tuple, list)):\n n_colours = len(data[colour_by[0]].unique())\n else:\n raise TypeError\n\n fig, ax = plt.subplots()\n plt.scatter(x=data[0], y=data[1], c=data['stroke'], alpha=0.3, cmap='viridis') # , label=label)\n cbar = plt.colorbar()\n cbar.ax.set_title('stroke')\n sns.despine(fig=fig, top=True, right=True)\n plt.xlabel('PC1 ({:.2f}%)'.format(pca.explained_variance_ratio_[0] * 100))\n plt.ylabel('PC2 ({:.2f}%)'.format(pca.explained_variance_ratio_[1] * 100))\n if n_colours < 8:\n plt.legend()\n plt.title(f'PCA coloured by {colour_by}')\n if self.savefig:\n fname = os.path.join(stroke_assessment.PCA_PLOTS_DIR, f'{colour_by}.png')\n plt.savefig(fname, dpi=300, bbox_inches='tight')\n else:\n plt.show()", "def pca(data, components):\n\n\t_pca = PCA(n_components = components)\n\t_pca.fit(data)\n\tvar = _pca.explained_variance_ratio_\n\tcum_var = np.cumsum(np.round(var, decimals=4)*100)\n\tfig = plt.plot(cum_var)\n\trotation = pd.DataFrame(\n\t\t_pca.components_,\n\t\tcolumns = data.columns,\n\t\tindex = ['PC-1','PC-2','PC-3','PC-4','PC-5','PC-6','PC-7','PC-8','PC-9',]\n\t\t)\n\n\treturn (fig, rotation)", "def apply_PCA(data, ncomp):\n import sklearn.decomposition as dc\n \n pca = dc.PCA(n_components=ncomp, whiten=False, svd_solver='full')\n cps = pca.fit_transform(data)\n svl = pca.singular_values_\n return cps,pca,svl", "def pca_detector(data):\n #- 'vol_shape' is the shape of volumes\n vol_shape = data.shape[:-1]\n #- 'n_vols' is the number of volumes\n n_vols = data.shape[-1]\n #- N is the number of voxels in a volume\n N 
= np.prod(vol_shape)\n\n #- Reshape to 2D array that is voxels by volumes (N x n_vols)\n # transpose to n_vols x N\n X = data.reshape((N, n_vols)).T\n\n \"\"\"\n The first part of the code will use PCA to get component matrix U\n and scalar projections matrix C\n \"\"\"\n\n #- Calculate unscaled covariance matrix for X\n unscaled_cov = X.dot(X.T)\n\n #- Use SVD to return U, S, VT matrices from unscaled covariance\n U, S, VT = npl.svd(unscaled_cov)\n\n #- Calculate the scalar projections for projecting X onto the vectors in U.\n #- Put the result into a new array C.\n C = U.T.dot(X)\n # set nans to 0\n C[np.isnan(C)] = 0\n #- Transpose C\n #- Reshape C to have the 4D shape of the original data volumes.\n C_vols = C.T.reshape((vol_shape + (n_vols,)))\n\n \"\"\"\n The second part of the code determines which voxels are inside the brain\n and which are outside the brain and creates a mask (boolean matrix)\n \"\"\"\n\n #get the mean voxel intensity of entire 4D object\n mean_voxel = np.mean(data)\n #get the mean volume (3D) across time series (axis 3)\n mean_volume = np.mean(data, axis=3)\n #boolean mask set to all voxels above .5 in the first volume\n #(.125 is the SPM criterion but .5 seems like a better threshold)\n mask = mean_volume > (.5 * mean_voxel) #threshold can be adjusted!\n out_mask = ~mask\n\n \"\"\"\n The third part of code finds the root mean square of U from step 1, then uses the\n mask from step 2 to determine which components explain data outside the brain\n Selects these \"bad components\" with high \"outsideness\"\n \"\"\"\n\n #Apply mask to C matrix to get all voxels outside of brain\n outside = C_vols[out_mask]\n #Get RMS of the voxels outside, reflecting \"outsideness\" of this scan\n RMS_out = np.sqrt(np.mean((outside ** 2), axis=0))\n\n #Apply mask to C matrix to get all voxels inside brain\n inside = C_vols[mask]\n #Get RMS of the voxels inside, reflecting \"insideness\" of this scan\n RMS_in = np.sqrt(np.mean((inside ** 2), axis=0))\n\n #The closer this ratio is to 1, the worse the volume\n RMS_ratio = RMS_out / RMS_in\n\n \"\"\"\n The fourth part of the code uses the \"bad components\" to generate a new\n \"bad data set\" and then puts this dataset through the outlier detector\n \"\"\"\n\n #Create a boolean mask for the 10% worst PCs (meaning highest RMS ratio)\n PC_bad = np.percentile(RMS_ratio, 90)\n PC_bad_mask = RMS_ratio > PC_bad\n\n U_bad = U[:, PC_bad_mask]\n C_bad = C[PC_bad_mask]\n\n #generates data set based on the bad PCs and (U and C matrices)\n X_bad = U_bad.dot(C_bad).T.reshape((vol_shape + (n_vols,)))\n\n # calculate outliers using iqr_detector\n _, outliers = mah_detector(X_bad)\n\n return X_bad, outliers", "def princomp(A):\n # computing eigenvalues and eigenvectors of covariance matrix\n # subtract the mean (along columns)\n M = (A-mean(A.T,axis=1)).T\n # attention:not always sorted\n [latent,coeff] = linalg.eig(cov(M))\n\n # projection of the data in the new space\n score = dot(coeff.T,M)\n return coeff,score,latent", "def pca(X, ndim):\n\n Xmean = X - np.mean(X, axis=0)\n _, _, vh = np.linalg.svd(Xmean)\n W = vh[:ndim].T\n T = np.matmul(Xmean, W)\n\n return T", "def plot_cov_ellipse(ellipses, cov, pos=[0.0, 0.0], nstds=[0.0,1.0,2.0], **kwargs):\n def eigsorted(cov):\n vals, vecs = _np.linalg.eigh(cov)\n order = vals.argsort()[::-1]\n return vals[order], vecs[:,order]\n\n\n vals, vecs = eigsorted(cov)\n theta = _np.degrees(_np.arctan2(*vecs[:,0][::-1]))\n\n # Width and height are \"full\" widths, not radius\n sigma_max = 0.5\n alpha = min(0.8, 
_np.prod(sigma_max /_np.sqrt(vals)))\n for i,e in enumerate(ellipses):\n sigma = nstds[i]\n width, height = 2 * sigma * _np.sqrt(vals)\n #ellipses[i].center = pos\n e.set_alpha(alpha)\n if sigma > 0.1: #if this is below, then treat ellipse as a center circle and do not modify size at all\n e.width = width\n e.height= height\n e.angle = theta\n e.center = pos\n e.set(**kwargs)\n\n# e.fill=True\n# e.set_linewidth(0.0)\n\n\n return ellipses", "def get_cov_ellipse(cov, centre, nstd, **kwargs):\n #WZN\n\n # Find and sort eigenvalues and eigenvectors into descending order\n eigvals, eigvecs = np.linalg.eigh(cov)\n order = eigvals.argsort()[::-1]\n eigvals, eigvecs = eigvals[order], eigvecs[:, order]\n\n # The anti-clockwise angle to rotate our ellipse by \n vx, vy = eigvecs[:,0][0], eigvecs[:,0][1]\n theta = np.arctan2(vy, vx)\n\n # Width and height of ellipse to draw\n width, height = 2 * nstd * np.sqrt(eigvals)\n return Ellipse(xy=centre, width=width, height=height,\n angle=np.degrees(theta), **kwargs)" ]
[ "0.71092635", "0.64815754", "0.64644057", "0.6348164", "0.6333782", "0.63307154", "0.6305946", "0.62970555", "0.6267798", "0.6267798", "0.6267798", "0.6246083", "0.6168096", "0.61434823", "0.603798", "0.60196155", "0.6015497", "0.6015308", "0.5967477", "0.59300923", "0.58183146", "0.58109754", "0.58054143", "0.5804403", "0.5785542", "0.57108754", "0.5708496", "0.5685423", "0.5679091", "0.56673867" ]
0.75360584
0
Create a control for each property in the videobalance widget
def customWidgets(self):
    # to be called a property value needs to change
    def onValueChanged(widget, prop):
        # set the corresponding property of the videobalance element
        self.balance.set_property(prop, widget.get_value())

    # videobalance has several properties, with the following range
    # and defaults
    properties = [("contrast", 0, 2, 1),
                  ("brightness", -1, 1, 0),
                  ("hue", -1, 1, 0),
                  ("saturation", 0, 2, 1)]

    # create a place to hold our controls
    controls = gtk.VBox()
    labels = gtk.VBox()

    # for every propety, create a control and set its attributes
    for prop, lower, upper, default in properties:
        widget = gtk.HScale(); label = gtk.Label(prop)

        # set appropriate atributes
        widget.set_update_policy(gtk.UPDATE_CONTINUOUS)
        widget.set_value(default)
        widget.set_draw_value(True)
        widget.set_range(lower, upper)

        # connect to our signal handler, specifying the property
        # to adjust
        widget.connect("value-changed", onValueChanged, prop)

        # pack widget into box
        controls.pack_start(widget, True, True)
        labels.pack_start(label, True, False)

    layout = gtk.HBox()
    layout.pack_start(labels, False, False)
    layout.pack_end(controls, True, True)

    return layout
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def toControls(self,widget):", "def fromControls(self,widget):", "def getControls(self):", "def create_widgets(self):", "def create_widgets( self ):", "def CreateControls(self, propgrid, property, pos, sz):\n try:\n x, y = pos\n w, h = sz\n h = 64 + 6\n\n # Make room for button\n bw = propgrid.GetRowHeight()\n w -= bw\n\n s = property.GetDisplayedString();\n\n tc = wx.TextCtrl(propgrid.GetPanel(), wxpg.PG_SUBID1, s,\n (x, y), (w, h),\n wx.TE_PROCESS_ENTER)\n btn = wx.Button(propgrid.GetPanel(), wxpg.PG_SUBID2, '...',\n (x + w, y),\n (bw, h), wx.WANTS_CHARS)\n return (tc, btn)\n except:\n import traceback\n print(traceback.print_exc())", "def create_widgets(self):\n # self.var_spherical = IntVar()\n # self.var_3d = IntVar()\n # self.var_spatial_audio = IntVar()\n # self.button_open[\"command\"] = self.action_open\n # self.button_inject[\"command\"] = self.action_inject\n pass", "def build_controls(self):\n controlSizer = wx.BoxSizer(wx.HORIZONTAL)\n \n btnData = [{'bitmap':'player_pause.png', \n 'handler':self.on_pause, 'name':'pause'},\n {'bitmap':'player_stop.png',\n 'handler':self.on_stop, 'name':'stop'}]\n for btn in btnData:\n self.build_btn(btn, controlSizer)\n \n return controlSizer", "def _createView(self):\n\n items = []\n\n if self.showProgressBar:\n items.append(Item('progress', show_label=False,\n editor=ProgressEditor(callback=self._seek)))\n\n # Controls\n items.append(\n alignCenter(\n Item('backwardButton', style='custom',\n enabled_when='not object.running and object.mainloopRunning '\n +'and object.sensors and object.iteration > 1'),\n Item('runButton', style='custom',\n enabled_when='object.pause and not object.done'),\n Item('pauseButton', style='custom',\n enabled_when='not (object.pause or object.done)'),\n Item('stepButton', style='custom',\n enabled_when='object.pause and not object.done'),\n show_labels=False,\n orientation='horizontal'\n ))\n\n # Repeat button and pause target buttons\n items.append(\n alignCenter(\n Item('repeatButton', show_label=False,\n enabled_when='not object.running and object.mainloopRunning '\n 'and object.iteration > 0'),\n Item('nextTargetButton', show_label=False,\n editor=ButtonEditor(label_value='targetButtonLabel'),\n enabled_when='not object.running and object.mainloopRunning '\n 'and object.pauseTarget'),\n Item('customTargetButton', show_label=False,\n enabled_when='not object.running and object.mainloopRunning')\n ))\n\n # Speed control\n items.append(Item('speed', style='custom', show_label=False,\n editor=EnumEditor(cols=1, values={\n 1 : '1: Slow (update on every iteration)',\n 10 : '2: Medium (update every 10 iterations)',\n 100 : '3: Fast (update every 100 iterations)'\n })\n ))\n\n\n items.extend([\n Group(\n Item('pauseAtNextStep'),\n show_left=False\n ),\n alignLeft(\n Item('stopButton', show_label=False, enabled_when='object.iteration')\n )\n ])\n\n self.traits_view = View(*items)", "def create_widgets(self):\n\n # tk.Button(win, text=\"Update\", command=self.update).grid(row=1, column=1)\n tkvar = tk.StringVar(win)\n # Dictionary with options\n choices = ('Clear', 'Small Glider', 'Glider', 'Exploder', '10 Cell Row', 'Light Weight Spaceship', 'Tumbler',\n 'Gosper Glider Gu')\n self.combo_input = ttk.Combobox(self.control_area, width=25, values=choices, state='readonly')\n self.combo_input.pack(side=tk.LEFT)\n self.combo_input.current(0)\n self.combo_input.bind(\"<<ComboboxSelected>>\", self.combo_callback)\n\n self.next = tk.Button(self.control_area, text=\"Next\", command=self.next_generation)\n 
self.next.pack(side=tk.LEFT, padx=3, pady=2)\n self.start = tk.Button(self.control_area, text=\"Start\", command=self.start_game)\n self.start.pack(side=tk.LEFT, padx=3, pady=2)\n\n self.stop = tk.Button(self.control_area, text=\"Stop\", fg=\"red\", command=self.stop_game)\n self.stop.pack(side=tk.LEFT, padx=3, pady=2)\n\n self.stop = tk.Button(self.control_area, text=\"Fast\", fg=\"red\", command=self.stop_game)\n self.stop.pack(side=tk.LEFT, padx=3, pady=2)\n self.gen_label = tk.Label(win, text=\"label\", bg=\"#808080\")\n self.gen_label.grid(row=0, column=1)", "def __display_controls(self):\n self.__fill_data_variables()\n self.__fill_smoothing_method()\n self.__fill_smooth_factor()\n\n left_box = VBox([self.data_vars])\n center_box = VBox([self.smoothing_methods])\n right_box = VBox([self.smooth_factor])\n #_HBox = HBox([left_box, center_box, right_box],\n _HBox = HBox([left_box, center_box, right_box],\n layout={'height': '80px',\n 'width' : '99%'}\n )\n display(_HBox)", "def propCalls():\r\n # create object and curve slot for dropdown\r\n bpy.types.Object.TCinitObject = bpy.props.StringProperty()\r\n bpy.types.Object.TCinitCurve = bpy.props.StringProperty()\r\n # curve or grease pencil enum\r\n bpy.types.Scene.TCinitCurveType = bpy.props.EnumProperty(\r\n items = [('1', 'Grease Pencil', 'grease'), \r\n ('2', 'Curve', 'curve')],\r\n name = \"curveType\")\r\n # create initial value boxes\r\n bpy.types.Scene.TCinitDepth = FloatProperty(name = \"Depth\", description = \"depth from view\", default = 5.00, min = -100, max = 100)\r\n bpy.types.Scene.TCinitDivision = FloatProperty(name = \"Division\", description = \"Division Spacing\", default = 0.5, min = -100, max = 100)\r\n bpy.types.Scene.TCinitExtrusion = FloatProperty(name = \"Extrusion\", description = \"Extrusion Depth\", default = 5, min = -100, max = 100)\r\n # axis enum\r\n bpy.types.Scene.TCinitAxis = bpy.props.EnumProperty(\r\n items = [('1', '3D Cursor', 'cursor'),\r\n ('2', 'X', 'x'), \r\n ('3', 'Y', 'y')],\r\n name = \"axisType\")\r\n # apply modifier\r\n bpy.types.Scene.TCinitApplyMod = BoolProperty(\r\n name = \"Apply Boolean\", \r\n description = \"Apply Boolean?\",\r\n default = True)\r\n # is cyclic or extrude\r\n bpy.types.Scene.TCinitCyclic = BoolProperty(\r\n name = \"Cyclic\", \r\n description = \"Disable extrusion and enable cyclic hole?\",\r\n default = False)\r\n # reverse direction\r\n bpy.types.Scene.TCinitReverseDir = BoolProperty(\r\n name = \"Reverse Direction\", \r\n description = \"Reverse Direction\",\r\n default = False)\r\n # reverse depth\r\n bpy.types.Scene.TCinitReverseDepth= BoolProperty(\r\n name = \"Reverse Depth\", \r\n description = \"Reverse Depth of extrusion\",\r\n default = False)\r\n # reverse trim\r\n bpy.types.Scene.TCinitReverseTrim = BoolProperty(\r\n name = \"Reverse Trim\", \r\n description = \"Reverse Trim\",\r\n default = False)\r\n # return to mode enum\r\n bpy.types.Scene.TCinitReturnMode = bpy.props.EnumProperty(\r\n items = [('1', 'Sculpt', 'sculpt'), \r\n ('2', 'Object', 'object'), \r\n ('3', 'Edit', 'edit')],\r\n name = \"returnMode\")", "def __set_control_elements(*args):\n args[0].Controls.valve_number = args[1]\n args[0].Controls.set_valve_number()\n args[0].Controls.go_elements = args[2]\n args[0].Controls.set_go_elements()", "def init ( self, parent ):\n # Create a panel to hold all of the buttons:\n self.control = panel = wx.Panel( parent, -1 )\n sizer = wx.BoxSizer( wx.VERTICAL )\n \n # Add the standard font control:\n font = self._font = wx.TextCtrl( panel, -1, 
self.str_value )\n wx.EVT_KILL_FOCUS( font, self.update_object )\n wx.EVT_TEXT_ENTER( panel, font.GetId(), self.update_object )\n sizer.Add( font, 0, wx.EXPAND | wx.BOTTOM, 3 )\n \n # Add all of the font choice controls:\n sizer2 = wx.BoxSizer( wx.HORIZONTAL )\n facenames = all_facenames()\n control = self._facename = wx.Choice( panel, -1, wx.Point( 0, 0 ), \n wx.Size( choice_width( facenames ), 20 ), \n facenames )\n \n sizer2.Add( control, 2, wx.EXPAND )\n wx.EVT_CHOICE( panel, control.GetId(), self.update_object_parts )\n \n control = self._point_size = wx.Choice( panel, -1, \n wx.Point( 0, 0 ), wx.Size( 30, 20 ), \n PointSizes )\n sizer2.Add( control, 1, wx.EXPAND | wx.RIGHT, 3 )\n wx.EVT_CHOICE( panel, control.GetId(), self.update_object_parts ) \n \n sizer.Add( sizer2, 0, wx.EXPAND )\n \n # Set-up the layout:\n panel.SetAutoLayout( True )\n panel.SetSizer( sizer )\n sizer.Fit( panel )", "def set_controls(self):\n # Image control\n image = pyxbmct.Image(addonfolder+artsfolder+'/wplay.png')\n self.placeControl(image, 0, 0, rowspan=10, columnspan=16)\n\n # LNB1\n self.wplnb1_button = pyxbmct.RadioButton('')\n self.placeControl(self.wplnb1_button, 11, 1, rowspan=1, columnspan=4)\n self.connect(self.wplnb1_button, self.wplnb1_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'wplnb1', 2) == 1:\n self.wplnb1_button.setSelected(True)\n else:\n self.wplnb1_button.setSelected(False)\n lnb1 = pyxbmct.Image(addonfolder+artsfolder+'/lnb1.png')\n self.placeControl(lnb1, 11, 1, rowspan=1, columnspan=4)\n\n # LNB2\n self.wplnb2_button = pyxbmct.RadioButton('')\n self.placeControl(self.wplnb2_button, 11, 6, rowspan=1, columnspan=4)\n self.connect(self.wplnb2_button, self.wplnb2_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'wplnb2', 2) == 1:\n self.wplnb2_button.setSelected(True)\n else:\n self.wplnb2_button.setSelected(False)\n lnb2 = pyxbmct.Image(addonfolder+artsfolder+'/lnb2.png')\n self.placeControl(lnb2, 11, 6, rowspan=1, columnspan=4)\n\n # LNB1/LNB2\n self.wplnboth_button = pyxbmct.RadioButton('')\n self.placeControl(self.wplnboth_button, 11, 11, rowspan=1, columnspan=4)\n self.connect(self.wplnboth_button, self.wplnboth_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'wplnboth', 2) == 1:\n self.wplnboth_button.setSelected(True)\n else:\n self.wplnboth_button.setSelected(False)\n both = pyxbmct.Image(addonfolder+artsfolder+'/both.png')\n self.placeControl(both, 11, 11, rowspan=1, columnspan=4)\n\n # Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def set_controls(self):\n # Image control\n image = pyxbmct.Image(addonfolder+artsfolder+'/khadas.png')\n self.placeControl(image, 0, 0, rowspan=7, columnspan=16)\n\n\t\t# KHADAS VTV\n kvtv = pyxbmct.Image(addonfolder+artsfolder+'/kvtv.png')\n self.placeControl(kvtv, 8, 2, rowspan=5, columnspan=4)\n\n\t\t# KHADAS VIM 2\n kvim = pyxbmct.Image(addonfolder+artsfolder+'/kvim.png')\n self.placeControl(kvim, 8, 11, rowspan=5, columnspan=4)\n\n\n\t\t# KHADAS KVIM2 & VTV\n self.kvimvtv_button = pyxbmct.RadioButton('')\n self.placeControl(self.kvimvtv_button, 10, 7, rowspan=2, columnspan=3)\n self.connect(self.kvimvtv_button, self.kvimvtv_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'kvim2', 2) == 1:\n self.kvimvtv_button.setSelected(True)\n else:\n self.kvimvtv_button.setSelected(False)\n kvimvtv = pyxbmct.Image(addonfolder+artsfolder+'/kvimvtv.png')\n 
self.placeControl(kvimvtv, 10, 7, rowspan=2, columnspan=3)\n\n\t\t# Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def set_controls(self):\n # Image control\n image = pyxbmct.Image(addonfolder+artsfolder+'/mapdvbt.png')\n self.placeControl(image, 0, 0, rowspan=10, columnspan=16)\n\n\t\t# TDT\n self.tdt_button = pyxbmct.RadioButton('')\n self.placeControl(self.tdt_button, 11, 1, rowspan=1, columnspan=4)\n self.connect(self.tdt_button, self.tdt_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'tdt', 2) == 1:\n self.tdt_button.setSelected(True)\n else:\n self.tdt_button.setSelected(False)\n tdt = pyxbmct.Image(addonfolder+artsfolder+'/tdt.png')\n self.placeControl(tdt, 11, 1, rowspan=1, columnspan=4)\n \n\t\t# Meo\n self.meo_button = pyxbmct.RadioButton('')\n self.placeControl(self.meo_button, 11, 6, rowspan=1, columnspan=4)\n self.connect(self.meo_button, self.meo_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'meo', 2) == 1:\n self.meo_button.setSelected(True)\n else:\n self.meo_button.setSelected(False)\n meo = pyxbmct.Image(addonfolder+artsfolder+'/meo.png')\n self.placeControl(meo, 11, 6, rowspan=1, columnspan=4)\n\n\t\t# Vodafone\n self.vodafone_button = pyxbmct.RadioButton('')\n self.placeControl(self.vodafone_button, 11, 11, rowspan=1, columnspan=4)\n self.connect(self.vodafone_button, self.vodafone_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'vodafone', 2) == 1:\n self.vodafone_button.setSelected(True)\n else:\n self.vodafone_button.setSelected(False)\n vodafone = pyxbmct.Image(addonfolder+artsfolder+'/vodafone.png')\n self.placeControl(vodafone, 11, 11, rowspan=1, columnspan=4)\n\n\t\t# Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def set_controls(self):\n # Image control\n image = pyxbmct.Image(addonfolder+artsfolder+'/tvh.png')\n self.placeControl(image, 0, 0, rowspan=8, columnspan=16)\n\n\t\t# Label information\n image = pyxbmct.Image(addonfolder+artsfolder+'/users.png')\n self.placeControl(image, 8, 1, rowspan=1, columnspan=14)\n\t\t\n\t\t# Username input\n image = pyxbmct.Image(addonfolder+artsfolder+'/username.png')\n self.placeControl(image, 10, 1, rowspan=1, columnspan=3)\n self.username_input = pyxbmct.Edit('')\n self.placeControl(self.username_input, 10, 4, rowspan=1, columnspan=4)\n\n\t\t# Password input\n image = pyxbmct.Image(addonfolder+artsfolder+'/password.png')\n self.placeControl(image, 11, 1, rowspan=1, columnspan=3)\n self.password_input = pyxbmct.Edit('', isPassword=True)\n self.placeControl(self.password_input, 11, 4, rowspan=1, columnspan=4)\n\n\t\t# Next button\n self.next_button = pyxbmct.Button('Next')\n self.placeControl(self.next_button, 13, 14, rowspan=1, columnspan=1)\n # Connect close button\n self.connect(self.next_button, lambda: self.page())\n\t\t\n\t\t# Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def set_controls(self):\n # Image control\n image = pyxbmct.Image(addonfolder+artsfolder+'/mapdvbc.png')\n self.placeControl(image, 0, 0, rowspan=10, columnspan=16)\n\n\t\t# Nos\n self.nos_button = pyxbmct.RadioButton('')\n self.placeControl(self.nos_button, 10, 3, rowspan=1, 
columnspan=4)\n self.connect(self.nos_button, self.nos_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'nos', 2) == 1:\n self.nos_button.setSelected(True)\n else:\n self.nos_button.setSelected(False)\n nos = pyxbmct.Image(addonfolder+artsfolder+'/nos.png')\n self.placeControl(nos, 10, 3, rowspan=1, columnspan=4)\n\n\t\t# Nos Madeira\n self.madeira_button = pyxbmct.RadioButton('')\n self.placeControl(self.madeira_button, 12, 6, rowspan=1, columnspan=4)\n self.connect(self.madeira_button, self.madeira_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'madeira', 2) == 1:\n self.madeira_button.setSelected(True)\n else:\n self.madeira_button.setSelected(False)\n madeira = pyxbmct.Image(addonfolder+artsfolder+'/madeira.png')\n self.placeControl(madeira, 12, 6, rowspan=1, columnspan=4)\n\n\t\t# Nowo\n self.nowo_button = pyxbmct.RadioButton('')\n self.placeControl(self.nowo_button, 10, 9, rowspan=1, columnspan=4)\n self.connect(self.nowo_button, self.nowo_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'nowo', 2) == 1:\n self.nowo_button.setSelected(True)\n else:\n self.nowo_button.setSelected(False)\n nowo = pyxbmct.Image(addonfolder+artsfolder+'/nowo.png')\n self.placeControl(nowo, 10, 9, rowspan=1, columnspan=4)\n\n\t\t# Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def get_controls(self):\n return pn.Column(\n pn.Column(\n pn.Row(super().get_controls(), margin = (0, 0, -25, 0)),\n pn.pane.HoloViews(self.get_band_dmap(), linked_axes=False)\n ), \n )", "def script_properties():\n props = obs.obs_properties_create()\n p = obs.obs_properties_add_list(props, \"source\", \"Text Source\",\n obs.OBS_COMBO_TYPE_EDITABLE,\n obs.OBS_COMBO_FORMAT_STRING)\n sources = obs.obs_enum_sources()\n if sources is not None:\n for source in sources:\n source_id = obs.obs_source_get_id(source)\n if source_id == \"text_gdiplus\" or source_id == \"text_ft2_source\":\n name = obs.obs_source_get_name(source)\n obs.obs_property_list_add_string(p, name, name)\n\n obs.source_list_release(sources)\n\n obs.obs_properties_add_button(props, \"button\", \"Start Overlay\", start_overlay)\n obs.obs_properties_add_button(props, \"button2\", \"Stop Overlay\", stop_overlay)\n\n return props", "def _create_value_widgets(self):\n \n # sort values\n self.values = sorted(self.values)\n self.selection = self.default\n \n for value in self.values:\n widget = self.panel.createWidgetT(\"Button\", \"Button\", \n mygui.IntCoord(15, (len(self.widgets)* 20 + 10), self.width - 20, 20),\n mygui.Align())\n widget.setUserString(\"value\", value)\n widget.setCaption(value)\n self.widgets.append(widget)\n \n if value == self.default:\n widget.setStateCheck(True)\n \n widget.subscribeEventMouseButtonClick(self, '_onTypeClick')\n \n self.panel.setSize(self.width, len(self.widgets) * 20 + 20)", "def set_controls(self):\n image = pyxbmct.Image(addonfolder+artsfolder+'/khadasdvb.png')\n self.placeControl(image, 0, 0, rowspan=8, columnspan=16)\n\t\t\n\t\t# DVB-C\n self.dvbc_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbc_button, 10, 1, rowspan=2, columnspan=4)\n self.connect(self.dvbc_button, self.dvbc_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'khadasdvbc', 2) == 1:\n self.dvbc_button.setSelected(True)\n else:\n self.dvbc_button.setSelected(False)\n dvbc = pyxbmct.Image(addonfolder+artsfolder+'/dvbc.png')\n self.placeControl(dvbc, 10, 1, 
rowspan=2, columnspan=4)\n \n\t\t# DVB-S\n self.dvbs_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbs_button, 10, 6, rowspan=2, columnspan=4)\n self.connect(self.dvbs_button, self.dvbs_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'khadasdvbs', 2) == 1:\n self.dvbs_button.setSelected(True)\n else:\n self.dvbs_button.setSelected(False)\n dvbs = pyxbmct.Image(addonfolder+artsfolder+'/dvbs2.png')\n self.placeControl(dvbs, 10, 6, rowspan=2, columnspan=4)\n\n\t\t# DVB-T\n self.dvbt_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbt_button, 10, 11, rowspan=2, columnspan=4)\n self.connect(self.dvbt_button, self.dvbt_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'khadasdvbt', 2) == 1:\n self.dvbt_button.setSelected(True)\n else:\n self.dvbt_button.setSelected(False)\n dvbt = pyxbmct.Image(addonfolder+artsfolder+'/dvbt.png')\n self.placeControl(dvbt, 10, 11, rowspan=2, columnspan=4)\n\n\t\t# Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def properties(self,prop):\r\n # The particulars of how they are stored and manipulated (e.g., do\r\n # we want an inventory internally) is not settled. I've used a\r\n # property dictionary for now.\r\n #\r\n # How these properties interact with a user defined style file is\r\n # even less clear.\r\n\r\n # Properties defined by plot\r\n self.xbox.set_text(r\"$%s$\" % prop[\"xlabel\"])\r\n self.ybox.set_text(r\"$%s$\" % prop[\"ylabel\"])\r\n self.tbox.set_text(r\"$%s$\" % prop[\"title\"])\r\n\r\n # Properties defined by user\r\n #self.axes.grid(True)\r", "def create_controls(self):\n\n self.button_frame = tk.LabelFrame(self, text=\"Controls\", padx=5, pady=5)\n self.button_frame.grid(row=0, column=1, padx=5, pady=5, sticky=\"n\")\n self.load_data = tk.Button(\n self.button_frame, text=\"Load Data\", command=self.update_stats\n )\n self.load_data.grid(row=0)\n\n self.print_data = tk.Button(\n self.button_frame, text=\"Print Data\", command=self.print_raw_data,\n )\n self.print_data.grid(row=1)\n\n self.quit = tk.Button(\n self.button_frame, text=\"Quit\", fg=\"red\", command=self.master.destroy\n )\n self.quit.grid(row=2)", "def make_widgets(self):\n self.mode_select = Selector(**MODE_SELECT_SETTINGS)\n self.bind_keys_to_modes()\n self.layer_select = Selector(**LAYER_SELECT_SETTINGS)\n self.check_boxes = CheckBoxArray(**CHECK_ARRAY_SETTINGS)\n self.check_boxes.bind_key(pg.K_v, self.toggle_layer_visibility)\n self.navs = [Button(**NAV_LEFT), Button(**NAV_RIGHT)]\n self.save_button = Button(**SAVE_BUTTON)\n self.load_button = Button(**LOAD_BUTTON)\n self.new_button = Button(**NEW_BUTTON)\n self.widgets = [self.mode_select, self.layer_select, self.check_boxes,\n self.navs[0], self.navs[1],\n self.save_button, self.load_button, self.new_button]", "def _add_control( self, control ):\r\n try:\r\n succeeded = True\r\n control[ \"special\" ] = False\r\n if ( self.use_desc_as_key ): key = control[ \"description\" ]\r\n else: key = control[ \"id\" ]\r\n # image control\r\n if ( control[ \"type\" ] == \"image\" ):\r\n if ( control[ \"info\" ] ):\r\n control[ \"texture\" ] = xbmc.getInfoImage( control[ \"info\" ][ 0 ] )\r\n current_control = xbmcgui.ControlImage(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\" ],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n filename = control[ \"texture\" ],\r\n colorKey = control[ \"colorkey\" ],\r\n 
aspectRatio = control[ \"aspectratio\" ],\r\n colorDiffuse = control[ \"colordiffuse\" ]\r\n )\r\n self.win.addControl( current_control )\r\n # progress control\r\n elif ( control[ \"type\" ] == \"progress\" ):\r\n current_control = xbmcgui.ControlProgress(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\" ],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n texturebg = control[ \"texturebg\" ],\r\n textureleft = control[ \"lefttexture\" ],\r\n texturemid = control[ \"midtexture\" ],\r\n textureright = control[ \"righttexture\" ],\r\n textureoverlay = control[ \"overlaytexture\" ]\r\n )\r\n self.win.addControl( current_control )\r\n # label control\r\n elif ( control[ \"type\" ] == \"label\" ):\r\n if ( control[ \"info\" ] ):\r\n control[ \"label\" ][ 0 ] = xbmc.getInfoLabel( control[ \"info\" ][ 0 ] )\r\n current_control = xbmcgui.ControlLabel(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\" ],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n label = control[ \"label\" ][ 0 ],\r\n font = control[ \"font\" ],\r\n textColor = control[ \"textcolor\" ],\r\n disabledColor = control[ \"disabledcolor\" ],\r\n alignment = control[ \"align\" ],\r\n hasPath = control[ \"haspath\" ],\r\n #shadowColor = control[ \"shadowcolor\" ],\r\n angle = control[ \"angle\" ]\r\n )\r\n self.win.addControl( current_control )\r\n # button control\r\n elif ( control[ \"type\" ] == \"button\" ):\r\n if ( control[ \"info\" ] ):\r\n control[ \"label\" ][ 0 ] = xbmc.getInfoLabel( control[ \"info\" ][ 0 ] )\r\n current_control = xbmcgui.ControlButton(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\"],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n label = control[ \"label\" ][ 0 ],\r\n font = control[ \"font\" ],\r\n textColor = control[ \"textcolor\" ],\r\n focusedColor = control[ \"focusedcolor\" ],\r\n disabledColor = control[ \"disabledcolor\" ],\r\n alignment = control[ \"align\" ],\r\n angle = control[ \"angle\" ],\r\n shadowColor = control[ \"shadowcolor\" ],\r\n focusTexture = control[ \"texturefocus\" ],\r\n noFocusTexture = control[ \"texturenofocus\" ],\r\n textXOffset = control[ \"textoffsetx\" ],\r\n textYOffset = control[ \"textoffsety\"]\r\n )\r\n self.win.addControl( current_control )\r\n # checkmark control\r\n elif ( control[ \"type\" ] == \"checkmark\" ):\r\n if ( control[ \"info\" ] ):\r\n control[ \"label\" ][ 0 ] = xbmc.getInfoLabel( control[ \"info\" ][ 0 ] )\r\n current_control = xbmcgui.ControlCheckMark(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\" ],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n label = control[ \"label\" ][ 0 ],\r\n font = control[ \"font\" ],\r\n textColor = control[ \"textcolor\" ],\r\n disabledColor = control[ \"disabledcolor\" ],\r\n alignment = control[ \"align\" ],\r\n focusTexture = control[ \"texturecheckmark\" ],\r\n noFocusTexture = control[ \"texturecheckmarknofocus\" ],\r\n checkWidth = control[ \"markwidth\" ],\r\n checkHeight = control[ \"markheight\" ]\r\n )\r\n self.win.addControl( current_control )\r\n # textbox control\r\n elif ( control[ \"type\" ] == \"textbox\" ):\r\n if ( control[ \"info\" ] ):\r\n control[ \"label\" ][ 0 ] = xbmc.getInfoLabel( control[ \"info\" ][ 0 ] )\r\n current_control = xbmcgui.ControlTextBox(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\" ],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n font = control[ \"font\" ],\r\n textColor = control[ \"textcolor\" ]\r\n )\r\n 
self.win.addControl( current_control )\r\n if ( \"label\" in control ): current_control.setText( control[ \"label\" ][ 0 ] )\r\n #fadelabel control\r\n elif ( control[ \"type\" ] == \"fadelabel\" ):\r\n current_control = xbmcgui.ControlFadeLabel(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\" ],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n font = control[ \"font\" ],\r\n textColor = control[ \"textcolor\" ],\r\n #shadowColor = control[ \"shadowcolor\" ],\r\n alignment = control[ \"align\" ]\r\n )\r\n self.win.addControl( current_control )\r\n if ( control[ \"info\" ] ):\r\n for item in control[ \"info\" ]:\r\n if ( item != \"\" ): current_control.addLabel( xbmc.getInfoLabel( item ) )\r\n if ( control[ \"label\" ] ):\r\n for item in control[ \"label\" ]:\r\n if ( item != \"\" ): current_control.addLabel( item )\r\n # list control\r\n elif ( control[ \"type\" ] == \"list\" or control[ \"type\" ] == \"listcontrol\" ):\r\n current_control = xbmcgui.ControlList(\r\n x = control[ \"posx\" ],\r\n y = control[ \"posy\" ],\r\n width = control[ \"width\" ],\r\n height = control[ \"height\" ],\r\n font = control[ \"font\" ],\r\n textColor = control[ \"textcolor\" ],\r\n alignmentY = control[ \"aligny\" ],\r\n buttonTexture = control[ \"texturenofocus\" ],\r\n buttonFocusTexture = control[ \"texturefocus\" ],\r\n selectedColor = control[ \"selectedcolor\" ],\r\n imageWidth = control[ \"itemwidth\" ],\r\n imageHeight = control[ \"itemheight\" ],\r\n itemTextXOffset = control[ \"textxoff\" ],\r\n itemTextYOffset = control[ \"textyoff\" ],\r\n itemHeight = control[ \"textureheight\" ],\r\n #shadowColor=control[\"shadowcolor\"],\r\n space = control[ \"spacebetweenitems\" ]\r\n )\r\n self.win.addControl( current_control )\r\n current_control.setPageControlVisible( not control[ \"hidespinner\" ] )\r\n control[ \"special\" ] = control[ \"hidespinner\" ]\r\n if ( control[ \"label\" ] ):\r\n for cnt, item in enumerate( control[ \"label\" ] ):\r\n if ( item != \"\" ): \r\n if ( cnt < len( control[ \"label2\" ] ) ): tmp = control[ \"label2\" ][ cnt ]\r\n else: tmp = \"\"\r\n if ( cnt < len( control[ \"image\" ] ) ): tmp2 = control[ \"image\" ][ cnt ]\r\n elif control[ \"image\" ]: tmp2 = control[ \"image\" ][ len( control[ \"image\" ] ) - 1 ]\r\n else: tmp2 = \"\"\r\n list_item = xbmcgui.ListItem( item, tmp, tmp2, tmp2 )\r\n current_control.addItem( list_item )\r\n \r\n self.win.controls[ key ] = {\r\n \"id\"\t\t\t: control[ \"id\" ],\r\n \"controlId\"\t: current_control.getId(),\r\n \"control\"\t\t: current_control,\r\n \"special\"\t\t: control[ \"special\" ],\r\n \"visible\"\t\t: [ control[ \"visible\" ].lower(), control[ \"allowhiddenfocus\" ] ],\r\n \"enable\"\t\t: control[ \"enable\" ].lower(),\r\n \"animation\"\t: control[ \"animation\" ],\r\n \"onclick\"\t\t: control[ \"onclick\" ],\r\n \"onfocus\"\t: control[ \"onfocus\" ]\r\n }\r\n self.navigation[ control[ \"id\" ] ] = ( key, int( control[ \"onup\" ] ), int( control[ \"ondown\" ] ), int( control[ \"onleft\" ] ), int( control[ \"onright\" ] ) )\r\n except:\r\n succeeded = False\r\n return succeeded", "def set_controls(self):\n image = pyxbmct.Image(addonfolder+artsfolder+'/dvb.png')\n self.placeControl(image, 0, 0, rowspan=8, columnspan=16)\n\t\t\n\t\t# DVB-C\n self.dvbc_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbc_button, 10, 1, rowspan=2, columnspan=4)\n self.connect(self.dvbc_button, self.dvbc_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'wdvbc', 2) == 1:\n 
self.dvbc_button.setSelected(True)\n else:\n self.dvbc_button.setSelected(False)\n dvbc = pyxbmct.Image(addonfolder+artsfolder+'/dvbc.png')\n self.placeControl(dvbc, 10, 1, rowspan=2, columnspan=4)\n \n\t\t# DVB-S\n self.dvbs_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbs_button, 10, 6, rowspan=2, columnspan=4)\n self.connect(self.dvbs_button, self.dvbs_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'wdvbs', 2) == 1:\n self.dvbs_button.setSelected(True)\n else:\n self.dvbs_button.setSelected(False)\n dvbs = pyxbmct.Image(addonfolder+artsfolder+'/dvbs2.png')\n self.placeControl(dvbs, 10, 6, rowspan=2, columnspan=4)\n\n\t\t# DVB-T\n self.dvbt_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbt_button, 10, 11, rowspan=2, columnspan=4)\n self.connect(self.dvbt_button, self.dvbt_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'wdvbt', 2) == 1:\n self.dvbt_button.setSelected(True)\n else:\n self.dvbt_button.setSelected(False)\n dvbt = pyxbmct.Image(addonfolder+artsfolder+'/dvbt.png')\n self.placeControl(dvbt, 10, 11, rowspan=2, columnspan=4)\n\n\t\t# Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def set_controls(self):\n image = pyxbmct.Image(addonfolder+artsfolder+'/dvb.png')\n self.placeControl(image, 0, 0, rowspan=8, columnspan=16)\n\t\t\n\t\t# DVB-C\n self.dvbc_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbc_button, 10, 1, rowspan=2, columnspan=4)\n self.connect(self.dvbc_button, self.dvbc_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'gdvbc', 2) == 1:\n self.dvbc_button.setSelected(True)\n else:\n self.dvbc_button.setSelected(False)\n dvbc = pyxbmct.Image(addonfolder+artsfolder+'/dvbc.png')\n self.placeControl(dvbc, 10, 1, rowspan=2, columnspan=4)\n \n\t\t# DVB-S\n self.dvbs_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbs_button, 10, 6, rowspan=2, columnspan=4)\n self.connect(self.dvbs_button, self.dvbs_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'gdvbs', 2) == 1:\n self.dvbs_button.setSelected(True)\n else:\n self.dvbs_button.setSelected(False)\n dvbs = pyxbmct.Image(addonfolder+artsfolder+'/dvbs2.png')\n self.placeControl(dvbs, 10, 6, rowspan=2, columnspan=4)\n\n\t\t# DVB-T\n self.dvbt_button = pyxbmct.RadioButton('')\n self.placeControl(self.dvbt_button, 10, 11, rowspan=2, columnspan=4)\n self.connect(self.dvbt_button, self.dvbt_button_update)\n if tools.return_data('TVHWIZARD', 'STRING', 'gdvbt', 2) == 1:\n self.dvbt_button.setSelected(True)\n else:\n self.dvbt_button.setSelected(False)\n dvbt = pyxbmct.Image(addonfolder+artsfolder+'/dvbt.png')\n self.placeControl(dvbt, 10, 11, rowspan=2, columnspan=4)\n\n\t\t# Close button\n self.close_button = pyxbmct.Button('Exit')\n self.placeControl(self.close_button, 13, 15, rowspan=1, columnspan=1)\n self.connect(self.close_button, lambda: self.closepage())", "def _create_widgets(self):\n # Generator Settings Group\n self._generator_group_box = PySide2.QtWidgets.QGroupBox()\n self._generator_group_box.setTitle(\"Level Generator Settings\")\n\n # Level Size\n self._level_size_lbl = PySide2.QtWidgets.QLabel(\"Level Size\")\n # X\n self._level_size_x_lbl = PySide2.QtWidgets.QLabel(\"X\")\n self._level_size_x_spinbox = PySide2.QtWidgets.QSpinBox()\n self._level_size_x_spinbox.setMinimum(level.MINIMUM_SIZE[X])\n self._level_size_x_spinbox.setMaximum(MAXIMUM_SIZE[X])\n # Y\n 
self._level_size_y_lbl = PySide2.QtWidgets.QLabel(\"Y\")\n self._level_size_y_spinbox = PySide2.QtWidgets.QSpinBox()\n self._level_size_y_spinbox.setMinimum(level.MINIMUM_SIZE[Y])\n self._level_size_x_spinbox.setMaximum(MAXIMUM_SIZE[Y])\n # Z\n self._level_size_z_lbl = PySide2.QtWidgets.QLabel(\"Z\")\n self._level_size_z_spinbox = PySide2.QtWidgets.QSpinBox()\n self._level_size_z_spinbox.setMinimum(level.MINIMUM_SIZE[Z])\n self._level_size_x_spinbox.setMaximum(MAXIMUM_SIZE[Z])\n\n # Minimum Length\n self._minimum_length_checkbox = PySide2.QtWidgets.QCheckBox(\"Minimum Length\")\n self._minimum_length_spinbox = PySide2.QtWidgets.QSpinBox()\n self._minimum_length_spinbox.setMinimum(0) # Negative minimum lengths don't make sense\n self._minimum_length_spinbox.setMaximum(MAXIMUM_LENGTH) # Minimum lengths > max length doesn't make sense\n\n # Maximum Length\n self._maximum_length_checkbox = PySide2.QtWidgets.QCheckBox(\"Maximum Length\")\n self._maximum_length_spinbox = PySide2.QtWidgets.QSpinBox()\n self._maximum_length_spinbox.setMinimum(2) # Maximum lengths < 2 don't make sense\n self._maximum_length_spinbox.setMaximum(MAXIMUM_LENGTH) # Maximum lengths > max length doesn't make sense\n\n # Seed\n self._seed_checkbox = PySide2.QtWidgets.QCheckBox(\"Seed\")\n self._seed_le = PySide2.QtWidgets.QLineEdit()\n\n # Scene Settings Group\n self._scene_group_box = PySide2.QtWidgets.QGroupBox()\n self._scene_group_box.setTitle(\"Maya Scene Settings\")\n\n # Block Size\n self._block_size_lbl = PySide2.QtWidgets.QLabel(\"Block Size\")\n # X\n self._block_size_x_lbl = PySide2.QtWidgets.QLabel(\"X\")\n self._block_size_x_spinbox = PySide2.QtWidgets.QDoubleSpinBox()\n self._block_size_x_spinbox.setMinimum(0)\n self._block_size_x_spinbox.setMaximum(MAXIMUM_BLOCK_DIMENSION)\n self._block_size_x_spinbox.setDecimals(MAXIMUM_BLOCK_PRECISION)\n self._block_size_x_spinbox.setSingleStep(float(\"1e-{}\".format(MAXIMUM_BLOCK_PRECISION)))\n # Y\n self._block_size_y_lbl = PySide2.QtWidgets.QLabel(\"Y\")\n self._block_size_y_spinbox = PySide2.QtWidgets.QDoubleSpinBox()\n self._block_size_y_spinbox.setMinimum(0)\n self._block_size_y_spinbox.setMaximum(MAXIMUM_BLOCK_DIMENSION)\n self._block_size_y_spinbox.setDecimals(MAXIMUM_BLOCK_PRECISION)\n self._block_size_x_spinbox.setSingleStep(float(\"1e-{}\".format(MAXIMUM_BLOCK_PRECISION)))\n # Z\n self._block_size_z_lbl = PySide2.QtWidgets.QLabel(\"Z\")\n self._block_size_z_spinbox = PySide2.QtWidgets.QDoubleSpinBox()\n self._block_size_z_spinbox.setMinimum(0)\n self._block_size_z_spinbox.setMaximum(MAXIMUM_BLOCK_DIMENSION)\n self._block_size_z_spinbox.setDecimals(MAXIMUM_BLOCK_PRECISION)\n self._block_size_x_spinbox.setSingleStep(float(\"1e-{}\".format(MAXIMUM_BLOCK_PRECISION)))\n\n # Group Name\n self._group_name_lbl = PySide2.QtWidgets.QLabel(\"Maya Group Name\")\n self._group_name_le = PySide2.QtWidgets.QLineEdit()\n\n # Object Block Group\n self._block_group_box = PySide2.QtWidgets.QGroupBox()\n self._block_group_box.setTitle(\"Object Block Settings\")\n\n # Object Blocks\n self._object_blocks = dict()\n for blk_type in VALID_BLOCK_TYPES:\n self._object_blocks[blk_type] = dict()\n self._object_blocks[blk_type][\"group\"] = PySide2.QtWidgets.QGroupBox()\n self._object_blocks[blk_type][\"group\"].setTitle(blocks.BLOCK_TYPE_STR[blk_type])\n self._object_blocks[blk_type][\"group\"].setStyleSheet(\"QGroupBox{border: 5px solid #444444;}\")\n\n # Path\n self._object_blocks[blk_type][\"pth_lbl\"] = PySide2.QtWidgets.QLabel(\"Path\")\n self._object_blocks[blk_type][\"pth_le\"] = 
PySide2.QtWidgets.QLineEdit()\n\n # Weight\n self._object_blocks[blk_type][\"weight_lbl\"] = PySide2.QtWidgets.QLabel(\"Weight\")\n self._object_blocks[blk_type][\"weight_spinbox\"] = PySide2.QtWidgets.QDoubleSpinBox()\n self._object_blocks[blk_type][\"weight_spinbox\"].setMinimum(0)\n self._object_blocks[blk_type][\"weight_spinbox\"].setMaximum(MAXIMUM_WEIGHT_DIMENSION)\n self._object_blocks[blk_type][\"weight_spinbox\"].setDecimals(MAXIMUM_WEIGHT_PRECISION)\n self._object_blocks[blk_type][\"weight_spinbox\"].setSingleStep(\n float(\"1e-{}\".format(MAXIMUM_WEIGHT_PRECISION)))\n\n # Buttons\n self._cancel_btn = PySide2.QtWidgets.QPushButton(\"Cancel\")\n self._generate_btn = PySide2.QtWidgets.QPushButton(\"Generate\")" ]
[ "0.70154065", "0.65180445", "0.6351398", "0.6309845", "0.6266637", "0.6256561", "0.6011848", "0.5990534", "0.5953611", "0.5917834", "0.5880908", "0.57732254", "0.57251745", "0.5692181", "0.5686156", "0.5669572", "0.5639852", "0.5622619", "0.5615317", "0.56069106", "0.55989015", "0.5595113", "0.55787504", "0.55748636", "0.55655986", "0.55574757", "0.5548688", "0.5544459", "0.5537162", "0.5522199" ]
0.74142814
0
Force all list calls after this to return 'consistent' results. You should generally not call this from your application logic; instead, your application code should handle inconsistent lists gracefully.
def force_consistent_list(self):
    self._force_consistent_list = True
    return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _filter_results_for_eventual_consistency(self, results):\n if self.list_count > self.NUM_INCONSISTENT_LISTS:\n return results\n else:\n num_remaining_lists = self.NUM_INCONSISTENT_LISTS - self.list_count\n # Don't start from offset of 0, so short lists (which will be more\n # common in tests) won't be able to ignore consistency issues.\n filtered_results = results[num_remaining_lists::self.NUM_INCONSISTENT_LISTS]\n return filtered_results", "def test_update_checklists_index_ignored_on_get(self):\r\n update_url = self.get_url(1)\r\n\r\n returned_checklists = json.loads(self.client.get(update_url).content)\r\n for pay, resp in zip(self.get_persisted_checklists(), returned_checklists):\r\n self.compare_checklists(pay, resp)", "def testRetrievingWithoutSpecifyingStart(self):\n cached_items = cached_list_logic.getCachedItems('test_list', limit=3)\n self.assertListEqual([self.item1, self.item2, self.item3], cached_items)", "def testRetrievingWithoutSpecifyingLimit(self):\n cached_items = cached_list_logic.getCachedItems('test_list', start=2)\n self.assertListEqual([self.item3, self.item4, self.item5], cached_items)", "def test_update_checklists_no_index(self):\r\n returned_checklists = json.loads(self.client.get(self.checklists_url).content)\r\n # Verify that persisted checklists do not have expanded action URLs.\r\n # compare_checklists will verify that returned_checklists DO have expanded action URLs.\r\n pers = self.get_persisted_checklists()\r\n self.assertEqual('CourseOutline', get_first_item(pers[1]).get('action_url'))\r\n for pay, resp in zip(pers, returned_checklists):\r\n self.compare_checklists(pay, resp)", "def refresh_list(self):\n if self._dominfo_lock.acquire(False):\n try:\n return self._refresh_list()\n finally:\n self._dominfo_lock.release()\n else:\n # wait until the refresh done by the other party is complete\n with self._dominfo_lock:\n pass", "def test_method_list_all(self):\n\n locations_list = Location.list()\n\n # returned object should be a list\n self.assertIsInstance(locations_list, list)\n\n # make sure items returned are not duplicated. 
\n location_set = set(locations_list)\n self.assertEqual(len(locations_list), len(location_set))\n \n # ensure the types of the returned items are all 'Location'\n types = [type(location) for location in locations_list]\n self.assertEqual(len(set(types)), 1)\n self.assertEqual(types[0], Location)", "def getRefreshList(self, startIndex=0, force=False):", "def populate_list(self):\n self._list.value = self.__get_queryset()\n # force the list to be updated\n self._list.mark_to_update_client()", "def test_get_list(self):\n pass", "def test_get_list_empty(self):\r\n result = self.get_json(self.LIST_URI)\r\n self.assertEqual(result[\"count\"], 0)\r\n self.assertIsNone(result[\"next\"])\r\n self.assertIsNone(result[\"previous\"])\r\n self.assertEqual(result[\"results\"], [])", "def sync_all_lists(self):\r\n print(\"Started syncing influencer master lists with DB\")\r\n screen_names_on_lists = []\r\n self._add_or_update(screen_names_on_lists)\r\n print(\"Removing entries which are no longer on any list\")\r\n self._delete_entries_not_in_list(screen_names_on_lists) # remove entries from DB if they are on no list\r\n print(\"Sync complete\")", "def refreshLists(self):\n \n contacts = sorted(contact.getContacts(self.db), key = lambda contact: unicode(contact))\n messages = sorted(message.getMessages(self.db), key = lambda message: unicode(message))\n self.userList.replaceList([(unicode(c), c) for c in contacts])\n self.messageList.replaceList([(unicode(m), m) for m in messages])", "def __noop_list(self, *args, **kwargs):\n return []", "def check_results(self):\n\n\t\twhile True:\n\n\t\t\t# If no checks left, stop\n\t\t\tif len(self._check_results) == 0:\n\t\t\t\tbreak\n\n\t\t\t# Return earliest result and remove from list\n\t\t\tyield self._check_results.pop(0)", "def reset(self):\r\n self.results = []\r\n return self.results", "def test_addsListingsFromAPIs(self):\n DataManagerUnitTest.dm.reload()\n result = DataManagerUnitTest.dm.onlineStoreDatabase.getListings()\n self.assertEqual(len(DataManagerUnitTest.dm.getAllListings()), len(result))", "def test_queryset_flush_list(self):\r\n q = Addon.objects.all()\r\n objects = list(q) # Evaluate the queryset so it gets cached.\r\n base.invalidator.add_to_flush_list({q.flush_key(): ['remove-me']})\r\n cache.set('remove-me', 15)\r\n\r\n Addon.objects.invalidate(objects[0])\r\n assert cache.get(q.flush_key()) is None\r\n assert cache.get('remove-me') is None", "def upgrade(self, results, reverse = False):\r\n \r\n scored_list = self.scored_list\r\n otherdocs = results.docs\r\n arein = [docnum for docnum in scored_list if docnum in otherdocs]\r\n notin = [docnum for docnum in scored_list if docnum not in otherdocs]\r\n \r\n if reverse:\r\n self.scored_list = notin + arein\r\n else:\r\n self.scored_list = arein + notin", "def handleList(self, _): # pylint: disable=invalid-name", "def test_list_format(self) -> None:\n r = self.perform_request('list', False)\n self.assert_json_schema(r.json(), self.get_list_schema())", "def test_wantlist(self):\n # Fetch the user/wantlist from the filesystem\n u = self.d.user('example')\n self.assertEqual(len(u.wantlist), 3)\n\n # Stub out expected responses\n self.m._fetcher.fetcher.responses = {\n '/users/example/wants/5': (b'{\"id\": 5}', 201),\n '/users/example/wants/1': (b'', 204),\n }\n\n # Now bind the user to the memory client\n u.client = self.m\n\n u.wantlist.add(5)\n method, url, data, headers = self.m._fetcher.last_request\n self.assertEqual(method, 'PUT')\n self.assertEqual(url, '/users/example/wants/5')\n\n 
u.wantlist.remove(1)\n method, url, data, headers = self.m._fetcher.last_request\n self.assertEqual(method, 'DELETE')\n self.assertEqual(url, '/users/example/wants/1')", "def test_list_ordering(self) -> None:\n list1 = List.objects.create()\n item1 = Item.objects.create(list=list1, text=\"i1\")\n item2 = Item.objects.create(list=list1, text=\"item 2\")\n item3 = Item.objects.create(list=list1, text=\"3\")\n self.assertEqual(list(Item.objects.all()), [item1, item2, item3])", "def test_list_identity(self):\n pass", "def __size_restriction_correct_list_list(self):\n\n strTestName = 'List size higher than the size of other list (correct)'\n RxCSObject = _RxCSobject()\n\n # Firstly, let us define a reference parameter\n RxCSObject.paramAddMan('lRefParameter1', 'Ref. parameter')\n RxCSObject.paramType('lRefParameter1', list)\n\n # Now, let us define a list\n RxCSObject.paramAddMan('parameter1', 'List parameter')\n RxCSObject.paramType('parameter1', list)\n RxCSObject.paramSizEq('parameter1', 'lRefParameter1', mul=0.5)\n\n RxCSObject.lRefParameter1 = [21, 22, 23, 24, 25, 26]\n RxCSObject.parameter1 = [11, 12, 13]\n\n self.__parametersCheck_error(RxCSObject, 'correct', strTestName)", "def merge_cached_results(*results):\r\n if len(results) == 1:\r\n return list(results[0])\r\n\r\n #make sure the sorts match\r\n sort = results[0].query._sort\r\n assert(all(r.query._sort == sort for r in results[1:]))\r\n\r\n def thing_cmp(t1, t2):\r\n for i, s in enumerate(sort):\r\n #t1 and t2 are tuples of (fullname, *sort_cols), so we can\r\n #get the value to compare right out of the tuple\r\n v1, v2 = t1[i + 1], t2[i + 1]\r\n if v1 != v2:\r\n return cmp(v1, v2) if isinstance(s, asc) else cmp(v2, v1)\r\n #they're equal\r\n return 0\r\n\r\n all_items = []\r\n for r in results:\r\n r.fetch()\r\n all_items.extend(r.data)\r\n\r\n #all_items = Thing._by_fullname(all_items, return_dict = False)\r\n return [i[0] for i in sorted(all_items, cmp = thing_cmp)]", "def test_get_filtered_list(self):\n flexmock(errata).should_receive(\"Advisory\").and_return(None)\n\n response = flexmock(status_code=200)\n response.should_receive(\"json\").and_return(test_structures.example_erratum_filtered_list)\n\n flexmock(errata.requests).should_receive(\"get\").and_return(response)\n\n res = errata.get_filtered_list()\n self.assertEqual(2, len(res))", "def _fit_content_lists(self, requested_lists):\n raise requested_lists > 0 or AssertionError\n if requested_lists != self._num_contents:\n while requested_lists < self._num_contents:\n self._pop_content_list()\n\n while requested_lists > self._num_contents:\n self._push_content_list()", "def test_ip_lists_get_command_for_success(mock_client, ip_lists_success, monkeypatch):\n monkeypatch.setattr(\n illumio.pce.PolicyComputeEngine._PCEObjectAPI,\n \"get\",\n lambda *a, **k: [IPList.from_json(ip_list) for ip_list in ip_lists_success],\n )\n resp = ip_lists_get_command(\n mock_client,\n {\n \"max_results\": \"1\",\n \"fqdn\": \"app\",\n \"ip_address\": \"127.0.0.1\",\n \"name\": \"a\",\n \"description\": \"a\",\n },\n )\n\n assert resp.raw_response == remove_empty_elements(ip_lists_success)", "def test_partial_updates(self):\r\n final = range(10)\r\n initial = final[3:7]\r\n m1 = TestListModel.create(int_list=initial)\r\n\r\n m1.int_list = final\r\n m1.save()\r\n\r\n m2 = TestListModel.get(partition=m1.partition)\r\n assert list(m2.int_list) == final" ]
[ "0.6987481", "0.6444678", "0.58526874", "0.58275646", "0.57697743", "0.57474655", "0.56870705", "0.56703025", "0.5668953", "0.56320864", "0.56245184", "0.5613652", "0.5580343", "0.55586827", "0.5510964", "0.549497", "0.54739416", "0.54650426", "0.5443179", "0.54411036", "0.54401493", "0.5440059", "0.54366046", "0.5394683", "0.53819406", "0.53766847", "0.5375651", "0.5371254", "0.5342142", "0.5339361" ]
0.74617606
0
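A minimal usage sketch for the force_consistent_list record above. The surrounding FakeS3Bucket class, its list_keys method, and the chained-call example are assumptions added for illustration; only the body of force_consistent_list itself comes from the document field of this record.

class FakeS3Bucket:
    """Hypothetical fake bucket used only to show the fluent call style."""

    def __init__(self, keys):
        self._keys = list(keys)
        self._force_consistent_list = False

    def force_consistent_list(self):
        self._force_consistent_list = True
        return self

    def list_keys(self):
        # With consistency forced, return everything; otherwise callers are
        # expected to tolerate partial results (not modelled in this sketch).
        return list(self._keys)

# Returning self lets the call be chained at construction time:
bucket = FakeS3Bucket(["a", "b", "c"]).force_consistent_list()
assert bucket.list_keys() == ["a", "b", "c"]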
Simulates S3's eventual consistency by filtering out items from the result set. Results are filtered by returning only every nth result. This has the nice property of interleaving returned results, so consumers can't make assumptions that treat S3 lists as append-only. The initial list call also won't return any results if there's only a single item in the list, which will make it harder for tests to ignore consistency issues.
def _filter_results_for_eventual_consistency(self, results):
    if self.list_count > self.NUM_INCONSISTENT_LISTS:
        return results
    else:
        num_remaining_lists = self.NUM_INCONSISTENT_LISTS - self.list_count
        # Don't start from offset of 0, so short lists (which will be more
        # common in tests) won't be able to ignore consistency issues.
        filtered_results = results[num_remaining_lists::self.NUM_INCONSISTENT_LISTS]
        return filtered_results
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testRetrievingWithoutSpecifyingLimit(self):\n cached_items = cached_list_logic.getCachedItems('test_list', start=2)\n self.assertListEqual([self.item3, self.item4, self.item5], cached_items)", "def testRetrievingWithoutSpecifyingStart(self):\n cached_items = cached_list_logic.getCachedItems('test_list', limit=3)\n self.assertListEqual([self.item1, self.item2, self.item3], cached_items)", "def mxbulksniff(items, ignore_errors=True):\n cache = {}\n for i in items:\n yield mxsniff(i, ignore_errors, cache)", "def filter_thumbnail_only(_list):\n result = list()\n for count, href in enumerate(_list):\n if count > 15:\n break\n if get_verified_response(get_thumbnail(href)).status == 200:\n result.append(href)\n return result", "def cleanse_priest_list(priests_list):", "def filter_n(function, iterable, **kwargs) -> iter:\n n_pass, n_fail = 0, 0\n\n for item in iterable:\n if function(item, **kwargs):\n yield item\n n_pass += 1\n else:\n n_fail += 1\n\n LOGGER.info(\"Filter %s: output %s rows (dropped %s rows)\", function.__name__, n_pass, n_fail)", "def get_file_list(\n self,\n file_regex = r'.*'):\n s3Contents = []\n #Use list_objects_v2 via kwargs since there could be\n #more than 1000 objects (single return limit)\n kwargs = {'Bucket': self.bucket, 'Prefix':self.key}\n while True:\n try:\n resp = self.s3.list_objects_v2(**kwargs)\n except:\n resp = None\n self.logger.error('Unable to reach s3 bucket')\n sys.exit(1)\n if resp.get(\"Contents\"):\n try:\n f_regex = re.compile(file_regex)\n #python 3.8+ required for walrus operator\n s3Contents += [f['Key'] for f in resp['Contents'] if (match := re.search(f_regex, f['Key']))]\n except Exception as e:\n self.logger.exception(e)\n self.logger.error('failed to filter s3 folder. Bucket: %s and location: %s',\n self.bucket,\n self.key)\n sys.exit(1)\n try:\n kwargs['ContinuationToken'] = resp['NextContinuationToken']\n except KeyError:\n break\n if not s3Contents:\n self.logger.warning(\n 'No files were returned from s3 bucket: %s and location: %s filtering by %s',\n self.bucket,\n self.key,\n file_regex)\n return s3Contents", "def filterduplicates(client, repeatfactor, tracks): # {{{1\n trackstofilter = client.playlist()\n if len(trackstofilter) < repeatfactor:\n repeatfactor = len(trackstofilter)\n trackstofilter = trackstofilter[-repeatfactor : -1]\n return [t for t in tracks if not t[1]['file'] in trackstofilter]", "def _purge_duplicates(f):\n @functools.wraps(f)\n def wrapper(*args, **kwds):\n ret_val = f(*args, **kwds)\n new_list = []\n for item in ret_val:\n if item in new_list:\n continue\n new_list.append(item)\n return new_list\n return wrapper", "def keep_n(self, n=100):\n before = self.item_count()\n\n item_count = self.item_count()\n if item_count > n: self.filter(self.sample(n))\n\n after = self.item_count()\n with msg(f'Keeping (at most) {n} items: {after} of {before}', done=False, enabled=self.output):pass", "def test_filter_output_third():\n data = [\n {\n \"name\": \"Bill\",\n \"last_name\": \"Gilbert\",\n \"occupation\": \"was here\",\n \"type\": \"person\",\n },\n {\"is_dead\": True, \"kind\": \"parrot\", \"type\": \"bird\", \"name\": \"polly\"},\n {\"is_dead\": False, \"kind\": \"parrot\", \"type\": \"bird\", \"name\": \"billy\"},\n ]\n\n actual_result = make_filter(name=\"billy\", type=\"bird\").apply(data)\n expected_result = [data[2]]\n assert actual_result == expected_result", "def my_filter(function,lst):\n return list(x for x in lst if function(x))", "def remove_duplicates_for_fetch(items: list, last_fetched_ids: list) 
-> list:\n return [\n item\n for item in items\n if item.get('id') and item.get('id') not in last_fetched_ids\n ]", "def _sublist(self, lst, exclude, length):\n for sublist in itertools.combinations([e for e in lst if e not in exclude], length):\n yield list(sublist)", "def dividelists(oldlst, tweet):\n for each in oldlst:\n if each == tweet:\n print('REMOVED', each[0], each[1], each[2], each[3])\n print('true:', each)\n oldlst.remove(each)\n print('each:', each)\n else:\n print('wut')\n pass\n return oldlst", "def trim_items(self, items):\r\n\t\tlogger.debug(\"Enter\")\r\n\t\t\r\n\t\tif self.transactions:\r\n\t\t\tall_items = set.union(*[self.transactions[u][-1] for u in self.transactions.keys()])\r\n\t\telse:\r\n\t\t\treturn items\r\n\t\t\t\r\n\t\ttmp = items.copy()\r\n\t\t\r\n\t\tfor i in items:\r\n\t\t\tif i in all_items:\r\n\t\t\t\tlogger.debug(\"Removing %r\" % i)\r\n\t\t\t\ttmp.remove(i)\r\n\t\t\t\t\r\n\t\tlogger.debug(\"Exit\")\r\n\t\treturn tmp", "def testRemovingDuplicates(self):\n\n item1 = {KEY: 'one', 'name': 'foo'}\n item2 = {KEY: 'two', 'name': 'bar'}\n item3 = {KEY: 'three', 'name': 'baz'}\n dup_item1 = {KEY: 'one', 'name': 'foo'}\n dup_item2 = {KEY: 'two', 'name': 'qux'}\n\n list_with_duplicates = [item1, item2, item3, dup_item1, dup_item2]\n # duplicate items should not be present in the cached list\n expected_list = [item1, item2, item3]\n\n cached_list_logic.setCacheItems('test_list', list_with_duplicates)\n cached_list = cached_list_model.CachedList.get_by_id('test_list')\n self.assertListEqual(cached_list.list_data, expected_list)", "def thin_list(list_to_thin, things_to_thin):\n for badness in things_to_thin:\n if badness in list_to_thin:\n list_to_thin.remove(badness)\n return list_to_thin", "def filter_list(prev_list, current_list, zeta):\n filtered_list = []\n for i, current_val in enumerate(current_list):\n prev_val = prev_list[i]\n filtered_list.append(\n moving_average_filter(current_val, prev_val, zeta))\n return filtered_list", "def split_list(items, pred):\n\n thisresult = []\n results = [thisresult]\n for i in items:\n thisresult.append(i)\n if pred(i):\n thisresult = []\n results.append(thisresult)\n return results", "def remove_outliers(lst):\n slst = sorted(lst)\n three_iqr = 3 * get_IQR(lst)\n low_boundary = float(np.percentile(lst, 25)) - three_iqr\n high_boundary = float(np.percentile(lst, 75)) + three_iqr\n\n return filter(lambda x: x >= low_boundary and x <= high_boundary, slst)", "def listops_uniq(list_a):\r\n retlist = []\r\n for item in list_a:\r\n if item not in retlist:\r\n retlist.append(item)\r\n\r\n return retlist", "def _lists_of_n(self, myList, n):\n if len(myList) <= 0:\n return []\n \n if len(myList) <= n:\n return [ myList ]\n\n ret = []\n currentList = []\n count = 0\n for item in myList:\n count = count + 1\n currentList.append(item)\n if count % n == 0:\n ret.append(currentList)\n currentList = []\n if len(currentList) > 0:\n ret.append(currentList)\n return ret", "def remove_every_other(lst):\n return [ea for ea in lst if lst.index(ea) % 2 == 0 ]", "def get_viewed_products(list):\n \n #initialize cart with random ASIN\n params = {\"Item.1.ASIN\":'B000DLB2FI', 'Item.1.Quantity':1}\n cart = amazon.CartCreate(**params)\n root = objectify.fromstring(cart)\n cartid = _safe_get_element_text('Cart.CartId', root)\n hmac = _safe_get_element_text('Cart.HMAC', root)\n\n #create empty list of similar products\n svlist = []\n\n count = 0 #testing\n\n #iterate through list of original ASINs and retrieve also viewed products\n print 'Retrieving 
\\\"Also Viewed\\\" Products!' #testing\n for item in list:\n #add to cart\n amazon.CartClear(CartId=cartid, HMAC=hmac)\n params = {\"Item.1.ASIN\":item, 'Item.1.Quantity':1, 'CartId':cartid, 'HMAC':hmac, 'ResponseGroup':'Cart,CartSimilarities'}\n cart = amazon.CartAdd(**params)\n root = objectify.fromstring(cart)\n \n count +=1 #testing\n print count #testing\n\n #iterate through each similar product and add to list\n #issue with ASIN = B004NK6DFE <- fixed\n if \"SimilarViewedProduct\" in cart:\n for item2 in root.Cart.SimilarViewedProducts.SimilarViewedProduct:\n if _safe_get_element_text('Title', item2) is not None:\n svlist.append({'Original ASIN':item,\n 'Associated ASIN':item2.ASIN,\n 'Title':item2.Title,\n 'Price': None,\n 'Currency Code':None,\n 'Relationship':\"Also Viewed\"})\n\n print 'Total # of \\\"Also Viewed\\\" Products: ' + str(len(svlist))\n count = 0 #testing\n\n #iterate through each also viewed prodcut and obtain lowest price\n print 'Retrieving prices!' #testing\n for item in svlist:\n if item['Title'] is not None:\n title = filter(lambda x: x in string.printable, item['Title'].text) #remove non-ascii\n item['Title'] = title\n\n count+=1 #testing\n print count #testing\n\n pricelist = amazon.ItemLookup(ItemId=item['Associated ASIN'],ResponseGroup=\"OfferSummary,VariationSummary\")\n priceroot = objectify.fromstring(pricelist)\n #conditionals to check if parent or child ASIN or OOS, Variation pricing can only be called on parent\n if _safe_get_element_text(\"Items.Item.OfferSummary.LowestNewPrice.FormattedPrice\", priceroot) is not None: #Child ASIN\n item['Price'] = _safe_get_element_text('Items.Item.OfferSummary.LowestNewPrice.FormattedPrice', priceroot)\n item['Currency Code'] = _safe_get_element_text('Items.Item.OfferSummary.LowestNewPrice.CurrencyCode', priceroot)\n else:\n item['Price'] = _safe_get_element_text('Items.Item.VariationSummary.LowestPrice.FormattedPrice', priceroot)\n item['Currency Code'] = _safe_get_element_text('Items.Item.VariationSummary.LowestPrice.CurrencyCode', priceroot)\n return svlist", "def test_list_filtering(self):\n # Test the \"all\" response.\n url = '/api/users/?all=true'\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, self.contract_user.email)\n self.assertContains(response, self.del_user.email)\n self.assertContains(response, self.shared.email)\n # Test filtering by ad_deleted.\n url = '/api/users/?ad_deleted=true'\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, self.del_user.email)\n self.assertNotContains(response, self.user1.email)\n url = '/api/users/?ad_deleted=false'\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertNotContains(response, self.del_user.email)\n self.assertContains(response, self.user1.email)\n # Test filtering by email (should return only one object).\n url = '/api/users/?email={}'.format(self.user1.email)\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n j = response.json()\n self.assertEqual(len(j['objects']), 1)\n self.assertContains(response, self.user1.email)\n self.assertNotContains(response, self.user2.email)\n # Test filtering by GUID (should return only one object).\n url = '/api/users/?ad_guid={}'.format(self.user1.ad_guid)\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n j = response.json()\n self.assertEqual(len(j['objects']), 1)\n self.assertContains(response, 
self.user1.email)\n self.assertNotContains(response, self.user2.email)\n # Test filtering by cost centre (should return all, inc. inactive and contractors).\n url = '/api/users/?cost_centre={}'.format(self.cc2.code)\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, self.user2.email)\n self.assertContains(response, self.contract_user.email)\n self.assertContains(response, self.del_user.email)\n self.assertNotContains(response, self.user1.email)\n self.assertNotContains(response, self.shared.email) # Belongs to CC1.\n # Test filtering by O365 licence status.\n self.user1.o365_licence = True\n self.user1.save()\n url = '/api/users/?o365_licence=true'\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, self.user1.email)\n self.assertNotContains(response, self.user2.email)", "def Collection_select_cheap(C:list, n: float)->list:\r\n result = []\r\n for rest in C:\r\n if Restaurant_is_cheap(rest, n) == True:\r\n result.append(rest)\r\n return result", "def remove_duplicates(self,list_):\r\n ret =[]\r\n\r\n for item in list_:\r\n if item not in ret:\r\n ret.append(item)\r\n removed = len(list_)-len(ret)\r\n logger.info('%d duplicate%s removed.' %(removed,plural_or_not(removed)))\r\n return ret", "def apply(self,\n input_list: Sequence[types.Artifact]) -> Sequence[types.Artifact]:\n # Verify that n, min_span, and offset are >= to their minimum values.\n if self.n < 1:\n raise ValueError(f'n must be > 0, but was set to {self.n}.')\n\n if self.offset < 0:\n raise ValueError(f'offset must be >= 0, but was set to '\n f'{self.offset}.')\n\n if self.min_span < 0:\n raise ValueError(f'min_span must be >= 0, but was set to '\n f'{self.min_span}.')\n\n # Only consider artifacts that have both \"span\" and \"version\" in PROPERTIES\n # with PropertyType.INT.\n valid_artifacts = []\n for artifact in input_list:\n if artifact.PROPERTIES is None:\n continue\n\n if ('span' not in artifact.PROPERTIES or\n artifact.PROPERTIES['span'].type != types.artifact.PropertyType.INT):\n continue\n\n if ('version' not in artifact.PROPERTIES or\n artifact.PROPERTIES['version'].type !=\n types.artifact.PropertyType.INT):\n continue\n\n valid_artifacts.append(artifact)\n\n if not valid_artifacts:\n return []\n\n valid_artifacts.sort(key=lambda a: a.span, reverse=True)\n\n # Only keep artifacts with spans >= self.min_span.\n spans = list(set([a.span for a in valid_artifacts]))\n spans = [s for s in spans if s >= self.min_span]\n spans.sort(reverse=True)\n\n if not spans:\n return []\n\n # Only keep artifacts with the n latest spans, accounting for\n # offset.\n if self.keep_all_spans:\n spans = spans[self.offset:]\n else:\n spans = spans[self.offset:self.offset + self.n]\n\n valid_artifacts = [a for a in valid_artifacts if a.span in spans]\n\n if self.keep_all_versions:\n return valid_artifacts\n\n # Only keep artifacts with the latest version.\n span_artifact_map = {}\n for artifact in valid_artifacts:\n span = artifact.span\n\n if span not in span_artifact_map:\n span_artifact_map[span] = artifact\n continue\n\n # Latest version is defined as the largest version. 
Ties broken by id.\n span_artifact_map[span] = max(\n artifact, span_artifact_map[span], key=lambda a: (a.version, a.id))\n\n return list(span_artifact_map.values())", "def post_processing(f,param_dict):\r\n must_have = param_dict['must_have']\r\n cannot_be_together = param_dict['cannot_be_together']\r\n\r\n # must have \r\n tmp = list()\r\n for itemset in f:\r\n if set(itemset).intersection(set(must_have)): \r\n tmp.append(itemset)\r\n\r\n f = tmp[:]\r\n\r\n # cannot be together\r\n for itemset in f:\r\n for cbt in cannot_be_together:\r\n if set(cbt) <= set(itemset):\r\n tmp.remove(itemset)\r\n \r\n return tmp" ]
[ "0.60967356", "0.59897625", "0.57029206", "0.5662621", "0.53988785", "0.5391997", "0.5382999", "0.5382957", "0.5312985", "0.5295677", "0.52932", "0.52842057", "0.5268883", "0.5228543", "0.5221372", "0.5189532", "0.5145578", "0.50993717", "0.5041751", "0.5038586", "0.50338745", "0.5013703", "0.49898136", "0.49876252", "0.49856794", "0.49834672", "0.49805507", "0.49641544", "0.49485463", "0.49440983" ]
0.6864502
0
Convenience function to tack on columns that are necessary for the functionality of the tool but have not yet been specifically requested by the user.
def _add_necessary_columns(args, custom_columns):
    # we need to add the variant's chrom, start and gene if
    # not already there.
    if custom_columns.find("gene") < 0:
        custom_columns += ", gene"
    if custom_columns.find("start") < 0:
        custom_columns += ", start"
    return custom_columns
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _these_columns_cannot_annotate_exp_cons(self):\n _cols = set([]) #\n for param_name, req_cols in self.required_columns.items():\n _cols |= req_cols\n\n return _cols | self.other_useful_columns", "def columns_setup(self):\n self.required = None\n self.addition = None\n self.deletion = None\n self.retention = None\n self.rename = None", "def add_missing_columns(input_column_info, cuts):\n output_column_info = input_column_info\n num_added_cols=0\n for cut in cuts:\n # the first entry of the cut tuple contains the \n # relevant column number of the ASCII data\n colnum = cut[0]\n # Check to see whether the column was not included in \n # the list of columns requested of the output data structure\n if colnum not in np.array(output_column_info)[:,0].astype(int):\n # Create a glaring, fake name for the column, \n # assume it is a float, and add it to column_info\n num_added_cols += 1\n name = 'unlisted_col'+str(num_added_cols)+'_with_cut'\n datatype = 'float'\n new_tuple = (colnum, name, datatype)\n output_column_info.append(new_tuple)\n # Now the halo catalog reader will additionally include \n # any columns used to make cuts that were omitted from \n # the columns requested to be saved to an output data structure. \n # This makes the coding simpler, and helps avoid the \n # hidden-cut problem. \n\n return output_column_info", "def _get_extra_column_names(self):\n if isinstance(self.extra_columns, int):\n my_columns = \"%s unnamed columns\" % self.extra_columns\n elif isinstance(self.extra_columns, list):\n if all([isinstance(X, tuple) for X in self.extra_columns]):\n my_columns = \",\".join([X[0] for X in self.extra_columns])\n elif all([isinstance(X, str) for X in self.extra_columns]):\n my_columns = \",\".join(self.extra_columns)\n\n return my_columns", "def get_non_feature_columns():\n return ['teamid', 'op_teamid', 'matchid', 'competitionid', 'seasonid',\n 'goals', 'op_goals', 'points', 'timestamp', 'team_name', \n 'op_team_name']", "def _get_columns(self):\n columns = []\n for column in self.plate_meta['columns']:\n columns.append(column['name'])\n self.columns = columns", "def exclude_cols(self, *_, **__) -> Tuple[str, ...]:", "def get_cols_dummy():", "def get_all_columns(self):\n df = self.get_prep_data()\n col = [c for c in df.columns if c not in ['target', 'idd', 'ft_data_dt']]\n return col", "def add_columns(self, **columns):\n return self.as_dataframe(self.data.assign(**columns))", "def columns(self):\r\n _columns = self.base_columns + self.veg_columns\r\n return _columns", "def remove_insertion_columns(self):\n cols = self.get_insertion_columns()\n s = []\n a = 0\n for b in cols:\n if b > a:\n s.append((a, b))\n a = b + 1\n s.append((a, len(self.col_labels)))\n for name, seq in list(self.items()):\n news = []\n for c in s:\n news.append(seq[c[0]:c[1]])\n self[name] = \"\".join(news)", "def load_columns(self):\n pass", "def columns(self):\n return self._meta.columns + self.new_columns", "def add_feature_columns(self, feature_columns: typing.List[str]):\n self.feature_columns += feature_columns", "def ComputeUnshownColumns(results, shown_columns, config, built_in_cols):\n unshown_set = set() # lowercases column names\n unshown_list = [] # original-case column names\n shown_set = {col.lower() for col in shown_columns}\n labels_already_seen = set() # whole labels, original case\n\n def _MaybeAddLabel(label_name):\n \"\"\"Add the key part of the given label if needed.\"\"\"\n if label_name.lower() in labels_already_seen:\n return\n labels_already_seen.add(label_name.lower())\n if 
'-' in label_name:\n col, _value = label_name.split('-', 1)\n _MaybeAddCol(col)\n\n def _MaybeAddCol(col):\n if col.lower() not in shown_set and col.lower() not in unshown_set:\n unshown_list.append(col)\n unshown_set.add(col.lower())\n\n # The user can always add any of the default columns.\n for col in config.default_col_spec.split():\n _MaybeAddCol(col)\n\n # The user can always add any of the built-in columns.\n for col in built_in_cols:\n _MaybeAddCol(col)\n\n # The user can add a column for any well-known labels\n for wkl in config.well_known_labels:\n _MaybeAddLabel(wkl.label)\n\n phase_names = set(itertools.chain.from_iterable(\n (phase.name.lower() for phase in result.phases) for result in results))\n # The user can add a column for any custom field\n field_ids_alread_seen = set()\n for fd in config.field_defs:\n field_lower = fd.field_name.lower()\n field_ids_alread_seen.add(fd.field_id)\n if fd.is_phase_field:\n for name in phase_names:\n phase_field_col = name + '.' + field_lower\n if (phase_field_col not in shown_set and\n phase_field_col not in unshown_set):\n unshown_list.append(phase_field_col)\n unshown_set.add(phase_field_col)\n elif field_lower not in shown_set and field_lower not in unshown_set:\n unshown_list.append(fd.field_name)\n unshown_set.add(field_lower)\n\n if fd.field_type == tracker_pb2.FieldTypes.APPROVAL_TYPE:\n approval_lower_approver = (\n field_lower + tracker_constants.APPROVER_COL_SUFFIX)\n if (approval_lower_approver not in shown_set and\n approval_lower_approver not in unshown_set):\n unshown_list.append(\n fd.field_name + tracker_constants.APPROVER_COL_SUFFIX)\n unshown_set.add(approval_lower_approver)\n\n # The user can add a column for any key-value label or field in the results.\n for r in results:\n for label_name in tracker_bizobj.GetLabels(r):\n _MaybeAddLabel(label_name)\n for field_value in r.field_values:\n if field_value.field_id not in field_ids_alread_seen:\n field_ids_alread_seen.add(field_value.field_id)\n fd = tracker_bizobj.FindFieldDefByID(field_value.field_id, config)\n if fd: # could be None for a foreign field, which we don't display.\n field_lower = fd.field_name.lower()\n if field_lower not in shown_set and field_lower not in unshown_set:\n unshown_list.append(fd.field_name)\n unshown_set.add(field_lower)\n\n return sorted(unshown_list)", "def columns(self):\n\n return None", "def dependent_cols():\n\n return ...", "def get_cols_drop():", "def orig_cols():\n return ['Q-E','ZN-E','PH-E','DBO-E','DQO-E','SS-E','SSV-E','SED-E','COND-E','PH-P','DBO-P','SS-P','SSV-P',\n 'SED-P','COND-P','PH-D','DBO-D','DQO-D','SS-D','SSV-D','SED-D','COND-D','PH-S','DBO-S','DQO-S',\n 'SS-S','SSV-S','SED-S','COND-S','RD-DBO-P','RD-SS-P','RD-SED-P','RD-DBO-S','RD-DQO-S','RD-DBO-G',\n 'RD-DQO-G','RD-SS-G','RD-SED-G']", "def getExtraColumnMetadata(self, study_id):\n try:\n con = self.getMetadataDatabaseConnection()\n results = con.cursor()\n extra_columns = {}\n con.cursor().callproc('qiime_assets.get_study_extra_columns', [study_id, results])\n for row in results:\n extra_columns[row[1]] = {'table_level':row[0], 'description':row[2], 'data_type':row[3]}\n return extra_columns\n except Exception, e: \n raise Exception('Exception caught in getExtraColumnMetadata(): %s.\\nThe error is: %s' % (type(e), e))", "def drop_extra_columns(self):\n table = self.data.loc[:, self._required_columns]\n return self.as_dataframe(table)", "def set_user_added_columns(self):\n if 'manual' not in self.keys():\n self['manual'] = ''", "def get_table_columns(self):\n raise 
NotImplementedError(\"Please implement this method\")", "def _addcolumns(self, columnname, columndata=\"\"):\n self[columnname] = columndata", "def output_columns(self) -> List[str]:", "def get_customer_columns():\n return cr.get_columns()", "def add_front_end_cols(df_users,model,feat_cols):\n\tdf_users['predicted_prob'] = model.predict_proba(df_users[feature_cols])[:,1] # take second column because that is \"True\"\n\n\tassert 'user_id' in test.columns, \"df_users needs to have column 'user_id' \"\n\tdf_users['user_dropdown'] = df_users.apply(lambda x: f\"User {x['user_id']} (p={x['predicted_prob']:.2f})\",axis=1)\n\tdf_users['user_emails'] = df_users.apply(lambda x: f\"User {x['user_id']} <user{x['user_id']}@email.com)\",axis=1)\n\treturn df_users.sort_values(by='predicted_prob',ascending=False)", "def get_export_columns(kind: str) -> dict:\r\n c = {\r\n 'u': {\r\n 'vendor_name': 'Vendor Name',\r\n 'number': 'Number',\r\n 'name': 'Name',\r\n 'assoc': 'Assocciated'\r\n },\r\n 'm': {\r\n 'email_address': 'Email Address',\r\n 'first_name': 'First Name',\r\n 'last_name': 'Last Name'\r\n }\r\n }\r\n columns = c['u'] # Because the matched DataFrame has all the same columns\r\n if kind == 'm': columns.update(c['m']) # as unmatched DataFrame, we use the dict.update() method\r\n return columns # to extend the columns of the unmatched DataFrame.\r", "def getStudyActualColumns(self, study_id):\n try:\n con = self.getMetadataDatabaseConnection()\n extra_columns = {}\n results = con.cursor()\n con.cursor().callproc('qiime_assets.get_study_actual_columns', [study_id, results])\n #for row in results:\n for column_name, table_name in results:\n #extra_columns[row[0]] = row[1]\n extra_columns[column_name] = table_name\n \n return extra_columns\n except Exception, e: \n raise Exception('Exception caught in addStudyActualColumns(): %s.\\nThe error is: %s' % (type(e), e))" ]
[ "0.7042445", "0.6572701", "0.6462197", "0.6344353", "0.63297266", "0.6177399", "0.61261445", "0.6061339", "0.60574806", "0.6044095", "0.60419595", "0.59884024", "0.59808314", "0.59711903", "0.5969461", "0.59672695", "0.5959676", "0.5954561", "0.5945721", "0.59399706", "0.59237444", "0.5894253", "0.5885628", "0.5882323", "0.5882173", "0.5856679", "0.58352625", "0.5814031", "0.58082134", "0.58042115" ]
0.75083494
0
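As a quick illustration of the record above, here is a minimal, self-contained sketch of how a helper like _add_necessary_columns behaves. The args namespace is a hypothetical stand-in for the tool's real argparse object and is not part of the original source.

from types import SimpleNamespace

def _add_necessary_columns(args, custom_columns):
    # append gene and start when the user did not request them explicitly
    if custom_columns.find("gene") < 0:
        custom_columns += ", gene"
    if custom_columns.find("start") < 0:
        custom_columns += ", start"
    return custom_columns

# hypothetical args object; the real tool builds this from its CLI parser
args = SimpleNamespace(columns="chrom, end, impact")
print(_add_necessary_columns(args, str(args.columns)))
# -> chrom, end, impact, gene, start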
Report candidate compound heterozygous mutations.
def get_compound_hets(args):
    gq = GeminiQuery.GeminiQuery(args.db, include_gt_cols=True)
    idx_to_sample = gq.idx_to_sample
    subjects_dict = subjects.get_subjects(gq.c)

    if args.columns is not None:
        custom_columns = _add_necessary_columns(args, str(args.columns))
        query = "SELECT " + custom_columns + \
                " FROM variants " + \
                " WHERE (is_exonic = 1 or impact_severity != 'LOW') "
    else:
        # report the kitchen sink
        query = "SELECT *" + \
                ", gts, gt_types, gt_phases, gt_depths, \
                gt_ref_depths, gt_alt_depths, gt_quals" + \
                " FROM variants " + \
                " WHERE (is_exonic = 1 or impact_severity != 'LOW') "

    # add any non-genotype column limits to the where clause
    if args.filter:
        query += " AND " + args.filter

    # run the query applying any genotype filters provided by the user.
    gq.run(query)

    comp_hets = collections.defaultdict(lambda: collections.defaultdict(list))
    for row in gq:
        gt_types = row['gt_types']
        gts = row['gts']
        gt_bases = row['gts']
        gt_phases = row['gt_phases']
        site = Site(row)

        # track each sample that is heteroyzgous at this site.
        for idx, gt_type in enumerate(gt_types):
            if gt_type == HET:
                sample = idx_to_sample[idx]
                if args.only_affected and not subjects_dict[sample].affected:
                    continue
                # sample = "NA19002"
                sample_site = copy(site)
                sample_site.phased = gt_phases[idx]

                # require phased genotypes
                if not sample_site.phased and not args.ignore_phasing:
                    continue

                sample_site.gt = gt_bases[idx]
                # add the site to the list of candidates
                # for this sample/gene
                comp_hets[sample][site.row['gene']].append(sample_site)

    # header
    print "family\tsample\tcomp_het_id\t" + str(gq.header)

    # step 2. now, cull the list of candidate heterozygotes for each
    # gene/sample to those het pairs where the alternate alleles
    # were inherited on opposite haplotypes.
    comp_het_id = 1
    for sample in comp_hets:
        for gene in comp_hets[sample]:

            # we only care about combinations, not permutations
            # (e.g. only need site1,site2, not site1,site2 _and site2,site1)
            # thus we can do this in a ~ linear pass instead of a ~ N^2 pass
            for idx, site1 in enumerate(comp_hets[sample][gene]):
                for site2 in comp_hets[sample][gene][idx + 1:]:

                    # expand the genotypes for this sample
                    # at each site into it's composite
                    # alleles. e.g. A|G -> ['A', 'G']
                    alleles_site1 = []
                    alleles_site2 = []
                    if not args.ignore_phasing:
                        alleles_site1 = site1.gt.split('|')
                        alleles_site2 = site2.gt.split('|')
                    else:
                        # split on phased (|) or unphased (/) genotypes
                        alleles_site1 = re.split('\||/', site1.gt)
                        alleles_site2 = re.split('\||/', site2.gt)

                    # it is only a true compound heterozygote IFF
                    # the alternates are on opposite haplotypes.
                    if not args.ignore_phasing:
                        # return the haplotype on which the alternate
                        # allele was observed for this sample at each
                        # candidate het. site.
                        # e.g., if ALT=G and alleles_site1=['A', 'G']
                        # then alt_hap_1 = 1. if ALT=A, then alt_hap_1 = 0
                        if "," in str(site1.row['alt']) or \
                           "," in str(site2.row['alt']):
                            sys.stderr.write("WARNING: Skipping candidate for sample"
                                             " %s b/c variants with mult. alt."
                                             " alleles are not yet supported. The sites are:"
                                             " %s and %s.\n" % (sample, site1, site2))
                            continue

                        alt_hap_1 = alleles_site1.index(site1.row['alt'])
                        alt_hap_2 = alleles_site2.index(site2.row['alt'])

                    # report if
                    # 1. phasing is considered AND the alt alleles are on
                    #    different haplotypes
                    # OR
                    # 2. the user doesn't care about phasing.
                    if (not args.ignore_phasing and alt_hap_1 != alt_hap_2) \
                        or args.ignore_phasing:
                        print \
                            "\t".join([str(subjects_dict[sample].family_id),
                                       sample,
                                       str(comp_het_id),
                                       str(site1.row)])
                        print \
                            "\t".join([str(subjects_dict[sample].family_id),
                                       sample,
                                       str(comp_het_id),
                                       str(site2.row)])
                        comp_het_id += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_mutation(self):\n genotype = '0|0|2|0|0|2|0|0 1|0|0|1|1|0|0|0 0|1|0|0|0|0|2|1--1 7'\n search_space = {'dil_conv_3x3', 'dil_conv_5x5', 'dil_conv_7x7',\n 'skip_connect', 'clinc_3x3', 'clinc_7x7', 'avg_pool_3x3', 'max_pool_3x3'}\n\n mutator = Mutations(search_space, prob_mutation=0.8,\n prob_resize=0.99, prob_swap=0.99)\n mutated_g = mutator(genotype)\n mutated_g = mutator(mutated_g)\n mutated_g = mutator(mutated_g)\n a, s, d = get_conf(mutated_g)\n print('---->', mutated_g)\n self.assertGreaterEqual(10, d)\n self.assertTrue(s in (0, 1))\n a = torch.tensor(a)\n d = int((a.shape[0]*2)**.5)\n start = 0\n for i in range(d):\n end = int((i+1)*(i+2)/2)\n self.assertTrue(a[start:end, :].sum() > 0)\n start = end", "def analyse ( self ) :\n odin = self.get( self.RootInTES + 'DAQ/ODIN' )\n \n ## Check for PVs\n PVs = self.get( self.RootInTES + self.InputPrimaryVertices )\n if not PVs or PVs.size() == 0:\n self.setFilterPassed( False )\n return SUCCESS\n\n ## get recontructed B+ mesons\n Bs = self.select ( 'B' , eval( self._cut % self._selection ) )\n \n if not Bs or Bs.size() == 0:\n self.setFilterPassed( False )\n return SUCCESS \n\n ## Select random candidate\n r = self.random( odin )\n n = Bs.size()\n for i in xrange( n ):\n if r <= ( float( i ) / float( n ) ): break\n B = Bs[ i ]\n \n tisTos = self.tisTosSignal( B, \"Hlt1Track(AllL0|Muon)Decision\" )\n if tisTos.tos():\n ## This has to be a clone, otherwise it doesn't work...\n self.markParticle( B.clone() )\n self.setFilterPassed( True )\n else:\n self.setFilterPassed( False )\n\n return SUCCESS", "def evaluate ( self, mutation ) :\n\t\tif isinstance( mutation , Mutation ):\n\t\t\tmutation = mutation.to_int()\n\n\t\tassert type( mutation ) is int , 'mutation must work out to a int or have an internal representaiton of int'\n\n\n\t\tfor phenotype, region in self.phenotypes.items():\n\n\t\t\tif mutation > region[0] and mutation < region[1]:\n\t\t\t\t# print phenotype\n\t\t\t\tself.counts[ phenotype ] += 1", "def analyze(self, event):\n\n gen_dme_hsm = -999\n gen_dme_hm = -999\n gen_dme_hp = -999\n gen_dme_hl = -999\n gen_dme_mixhm = -999\n gen_dme_mixhp = -999 \n gen_dme_mixhl = -999 \n\n gen_pme_hsm = -999\n gen_pme_hm = -999\n gen_pme_hp = -999\n gen_pme_hl = -999\n gen_pme_mixhm = -999\n gen_pme_mixhp = -999 \n gen_pme_mixhl = -999 \n\n self.LHE = Collection(event,\"LHEPart\")\n Gen = Collection(event,\"GenPart\")\n\n daughters = ROOT.vector('TLorentzVector')()\n daughterIDs = ROOT.vector('int')()\n\n HFinalStateIdx = []\n for gid,gen in enumerate(Gen):\n if abs(gen.pdgId) >= 21: continue\n mid = event.GenPart_genPartIdxMother[gid]\n if mid == -1: continue\n # print \"genp \", gen.pdgId, event.GenPart_pdgId[mid], self.FromH(event, gid)\n if abs(event.GenPart_pdgId[mid]) != 24: continue \n if self.FromH(event, gid) == False: continue\n HFinalStateIdx.append(gid)\n\n if len(HFinalStateIdx) != 4: \n HFinalStateIdx = self.RemoveGammaW(event, HFinalStateIdx)\n\n if len(HFinalStateIdx) != 4: \n HFinalStateIdx = self.RemoveAddHadron(event, HFinalStateIdx)\n\n LHEHFinalState = self.getLHE(event, HFinalStateIdx) \n\n if len(LHEHFinalState)!=4:\n print \"SOMETHING WENT WRONG!, WW final state\", len(LHEHFinalState), LHEHFinalState\n\n for ipart in LHEHFinalState:\n d = ROOT.TLorentzVector()\n d.SetPtEtaPhiM(LHEHFinalState[ipart][0], LHEHFinalState[ipart][1], LHEHFinalState[ipart][2], 0.)\n daughters.push_back(d)\n daughterIDs.push_back(LHEHFinalState[ipart][3]) \n\n # Check lnu pairs to catch corrupted pdgid events (in H0L1f05 part0 2016 
nAODv7)\n\n if self.sample is \"H0L1f05_ToWWTo2L2Nu\" :\n if abs(daughterIDs[0]) in [11,13,15] and abs(daughterIDs[1]) in [12,14,16] and abs(daughterIDs[2]) in [12,14,16] and abs(daughterIDs[3]) in [11,13,15] :\n if self.MatchLNuIDs(daughterIDs[0], daughterIDs[1]) is False : \n print \"1st Lep-Nu pair IDs not matching : \", daughterIDs[0], daughterIDs[1]\n daughterIDs[1] = -1*numpy.sign(daughterIDs[0])*(abs(daughterIDs[0])+1) \n if self.MatchLNuIDs(daughterIDs[3], daughterIDs[2]) is False : \n print \"2nd Lep-Nu pair IDs not matching : \", daughterIDs[3], daughterIDs[2]\n daughterIDs[2] = -1*numpy.sign(daughterIDs[3])*(abs(daughterIDs[3])+1)\n \n mothers = ROOT.vector('TLorentzVector')()\n motherIDs = ROOT.vector('int')()\n incoming1=ROOT.TLorentzVector()\n incoming1.SetPxPyPzE(0.,0., event.Generator_x1*6500, event.Generator_x1*6500)\n incoming2=ROOT.TLorentzVector()\n incoming2.SetPxPyPzE(0.,0.,-1*event.Generator_x2*6500, event.Generator_x2*6500)\n mothers.push_back(incoming1)\n mothers.push_back(incoming2)\n genid1 = int(event.Generator_id1)\n genid2 = int(event.Generator_id2)\n motherIDs.push_back(genid1)\n motherIDs.push_back(genid2) \n\n # additional particles (WH, ZH, VBF)\n\n adds = ROOT.vector('TLorentzVector')()\n addIDs = ROOT.vector('int')()\n\n if self.productionProcess == \"ZH\" or self.productionProcess == \"WH\": \n\n VFinalStateIdx = []\n for gid,gen in enumerate(Gen):\n if abs(gen.pdgId) >= 21: continue \n if self.FromH(event, gid) == True: continue \n mid = event.GenPart_genPartIdxMother[gid]\n if mid == -1: continue\n\n if abs(event.GenPart_pdgId[mid]) == 23 and self.productionProcess == \"ZH\": \n VFinalStateIdx.append(gid) \n if abs(gen.pdgId) in [1,2,3,4,5]:\n self.productionMela = ROOT.TVar.Had_ZH\n elif abs(gen.pdgId) in [11,12,13,14,15,16,17,18]:\n self.productionMela = ROOT.TVar.Lep_ZH\n\n if abs(event.GenPart_pdgId[mid]) == 24 and self.productionProcess == \"WH\": \n VFinalStateIdx.append(gid) \n if abs(gen.pdgId) in [1,2,3,4,5]:\n self.productionMela = ROOT.TVar.Had_WH\n elif abs(gen.pdgId) in [11,12,13,14,15,16,17,18]:\n self.productionMela = ROOT.TVar.Lep_WH\n\n if len(VFinalStateIdx) != 2 and self.productionProcess == \"WH\": \n VFinalStateIdx = self.RemoveGammaW(event, VFinalStateIdx)\n\n if len(VFinalStateIdx) != 2: \n VFinalStateIdx = self.RemoveAddHadron(event, VFinalStateIdx)\n\n LHEVFinalState = self.getLHE(event, VFinalStateIdx) \n\n if len(LHEVFinalState)!=2:\n print \"SOMETHING WENT WRONG!, V final state \", len(LHEVFinalState), VFinalStateIdx\n\n for ipart in LHEVFinalState:\n add = ROOT.TLorentzVector()\n add.SetPtEtaPhiM(LHEVFinalState[ipart][0], LHEVFinalState[ipart][1], LHEVFinalState[ipart][2], 0.)\n adds.push_back(add)\n addIDs.push_back(LHEVFinalState[ipart][3]) \n\n elif self.productionProcess == \"VBF\" : \n\n self.productionMela = ROOT.TVar.JJVBF\n\n LHEjetIdx = []\n for idx,part in enumerate(self.LHE):\n if abs(part.pdgId) in [1,2,3,4,5,21] and part.status==1:\n LHEjetIdx.append(idx)\n\n LHEjetIdx = self.pTorder(event, LHEjetIdx)\n\n for ijet in LHEjetIdx:\n add = ROOT.TLorentzVector()\n add.SetPtEtaPhiM(event.LHEPart_pt[ijet], event.LHEPart_eta[ijet], event.LHEPart_phi[ijet], 0.)\n adds.push_back(add)\n addIDs.push_back(int(event.LHEPart_pdgId[ijet]))\n\n if len(adds) !=2 : \n print \"SOMETHING WENT WRONG!, VBF associated partons \", len(adds)\n\n elif self.productionProcess == \"GluGlujj\" : \n\n self.productionMela = ROOT.TVar.JJQCD\n\n LHEjetIdx = []\n for idx,part in enumerate(self.LHE):\n if abs(part.pdgId) in [1,2,3,4,5,21] and 
part.status==1:\n LHEjetIdx.append(idx)\n\n LHEjetIdx = self.pTorder(event, LHEjetIdx)\n\n for ijet in LHEjetIdx:\n add = ROOT.TLorentzVector()\n add.SetPtEtaPhiM(event.LHEPart_pt[ijet], event.LHEPart_eta[ijet], event.LHEPart_phi[ijet], 0.)\n adds.push_back(add)\n addIDs.push_back(int(event.LHEPart_pdgId[ijet]))\n\n if len(adds) !=2 : \n print \"SOMETHING WENT WRONG!, GluGlujj associated partons \", len(adds)\n\n # Get MEs from MELA\n\n daughter_coll = ROOT.SimpleParticleCollection_t() \n associated_coll = ROOT.SimpleParticleCollection_t() \n mother_coll = ROOT.SimpleParticleCollection_t()\n\n for idx, dau in enumerate(daughters):\n daughter_coll.push_back(ROOT.SimpleParticle_t(daughterIDs[idx], dau)) \n \n for idx, par in enumerate(adds):\n associated_coll.push_back(ROOT.SimpleParticle_t(addIDs[idx], par))\n \n for idx, mot in enumerate(mothers):\n mother_coll.push_back(ROOT.SimpleParticle_t(motherIDs[idx], mot))\n \n self.mela.setCandidateDecayMode(ROOT.TVar.CandidateDecay_WW) \n self.mela.setInputEvent(daughter_coll, associated_coll, mother_coll, 1)\n self.mela.setCurrentCandidateFromIndex(0)\n\n DME = [1, 1, 1, 1, 1, 1, 1] \n PME = [1, 1, 1, 1, 1, 1, 1]\n\n if self.vertexType == \"HVV\":\n\n if self.productionProcess == \"GluGlu\" : \n DME = ROOT.melaHiggsEFT(self.mela, ROOT.TVar.JHUGen, ROOT.TVar.ZZINDEPENDENT, 1, 0)\n else : \n DME = ROOT.melaHiggsEFT(self.mela, ROOT.TVar.JHUGen, ROOT.TVar.ZZINDEPENDENT, 0, 0)\n PME = ROOT.melaHiggsEFT(self.mela, ROOT.TVar.JHUGen, self.productionMela, 0, 0) \n \n gen_dme_hsm = DME[0]\n gen_dme_hm = DME[1]\n gen_dme_hp = DME[2]\n gen_dme_hl = DME[3]\n gen_dme_mixhm = DME[4]\n gen_dme_mixhp = DME[5] \n gen_dme_mixhl = DME[6] \n\n self.out.fillBranch( 'gen_dme_hsm', gen_dme_hsm )\n self.out.fillBranch( 'gen_dme_hm', gen_dme_hm )\n self.out.fillBranch( 'gen_dme_hp', gen_dme_hp )\n self.out.fillBranch( 'gen_dme_hl', gen_dme_hl )\n self.out.fillBranch( 'gen_dme_mixhm', gen_dme_mixhm )\n self.out.fillBranch( 'gen_dme_mixhp', gen_dme_mixhp )\n self.out.fillBranch( 'gen_dme_mixhl', gen_dme_mixhl )\n\n if self.productionProcess != \"GluGlu\" : \n\n gen_pme_hsm = PME[0]\n gen_pme_hm = PME[1]\n gen_pme_hp = PME[2]\n gen_pme_hl = PME[3]\n gen_pme_mixhm = PME[4]\n gen_pme_mixhp = PME[5] \n gen_pme_mixhl = PME[6] \n\n self.out.fillBranch( 'gen_pme_hsm', gen_pme_hsm )\n self.out.fillBranch( 'gen_pme_hm', gen_pme_hm )\n self.out.fillBranch( 'gen_pme_hp', gen_pme_hp )\n self.out.fillBranch( 'gen_pme_hl', gen_pme_hl )\n self.out.fillBranch( 'gen_pme_mixhm', gen_pme_mixhm )\n self.out.fillBranch( 'gen_pme_mixhp', gen_pme_mixhp )\n self.out.fillBranch( 'gen_pme_mixhl', gen_pme_mixhl )\n\n elif self.vertexType == \"Hgg\" : \n\n PME = ROOT.melaHiggsEFT(self.mela, ROOT.TVar.JHUGen, self.productionMela, 1, 0) \n\n gen_pme_hsm = PME[0]\n gen_pme_hm = PME[1]\n gen_pme_mixhm = PME[4]\n\n self.out.fillBranch( 'gen_pme_hsm', gen_pme_hsm )\n self.out.fillBranch( 'gen_pme_hm', gen_pme_hm )\n self.out.fillBranch( 'gen_pme_mixhm', gen_pme_mixhm )\n\n if math.isnan(gen_pme_hsm) : print \"SOMETHING WENT WRONG?, Production ME is nan \"\n if math.isnan(gen_dme_hsm) : print \"SOMETHING WENT WRONG?, Decay ME is nan \"\n\n self.mela.resetInputEvent()\n\n return True", "def test_mutation2(self):\n genotype = '0|0|2|0|0|2|0|0 1|0|0|1|1|0|0|0 0|1|0|0|0|0|2|1--1 7'\n search_space = {'dil_conv_3x3', 'dil_conv_5x5', 'dil_conv_7x7',\n 'skip_connect', 'clinc_3x3', 'clinc_7x7', 'avg_pool_3x3', 'max_pool_3x3'}\n\n mutator = Mutations(search_space, prob_mutation=0.8,\n prob_resize=0.99, 
prob_swap=0.99)\n mutated_g = mutator(genotype)\n a, s, d = get_conf(mutated_g)\n mutator.update_strat_good(a)", "def _samples(self):\n finite_types = \\\n [QuiverMutationType(t) for t in [['A', 1], ['A', 5], ['B', 2], ['B', 5],\n ['C', 3], ['C', 5], ['D', 2], ['D', 5],\n [\"E\", 6], [\"E\", 7], [\"E\", 8], [\"F\", 4],\n [\"G\", 2]]]\n affine_types = \\\n [QuiverMutationType(t) for t in [['A', [1,1], 1], ['A', [4,5], 1], ['D', 4, 1], ['BB', 5, 1]]]\n elliptic_types = \\\n [QuiverMutationType(t) for t in [['E', 6, [1,1]], ['E', 7, [1,1]]]]\n mutation_finite_types = \\\n [QuiverMutationType(t) for t in [['R2',(1,5)], ['R2',(3,5)]]]\n mutation_infinite_types = \\\n [QuiverMutationType(t) for t in [['E',10], ['BE',5], ['GR',(3,10)], ['T',(3,3,4)]]]\n\n return finite_types + affine_types + elliptic_types + mutation_finite_types + mutation_infinite_types", "def theoretical_stats_selectivity(self) -> np.ndarray:\n warn('This method will likely be phased out', category=FutureWarning)\n grand_final = []\n all_of_it = []\n for elt in self.final_comb_table:\n for elt2 in self.mean_and_sd_dic.keys():\n if str(elt[:self.mutation_number]) == str(elt2):\n elt = np.append(elt, list(self.mean_and_sd_dic[elt2]))\n for elt3 in self.combs_only:\n if np.array_equal(elt[len(self.mutations_list)], elt3) == True:\n theor_mean = np.array([0])\n replicate_values = np.zeros((1, len(self.replicate_matrix[0])))\n for elt4 in elt3:\n target = self.mean_and_sd_array[elt4 - 1][0]\n theor_mean = np.add(theor_mean, target)\n target2 = self.replicate_matrix[elt4 - 1]\n replicate_values = np.add(replicate_values, target2)\n theor_sd = (np.std(replicate_values)) / math.sqrt(self.replicate_number)\n elt = np.append(elt, list(theor_mean))\n elt = np.append(elt, theor_sd)\n grand_final.append(elt)\n if self.verbose:\n print('mutationlist', self.mutations_list)\n print('grand_final', grand_final)\n for elt5 in grand_final:\n at_last = (elt5[len(self.mutations_list) + 1:][0]) - (elt5[len(self.mutations_list) + 1:][2])\n elt5 = np.append(elt5, at_last)\n all_of_it.append(elt5)\n return np.array(all_of_it)", "def get_my_mutations(quality_cutoff, coverage_cutoff):\n\n # my_mutations = {}\n # with open('/home/perry/Projects/loh/working/murim.exome.aa_chg.vars') as f:\n # for line in f:\n # my_mutations[line.strip()] = True\n # return my_mutations\n\n bed_file = 'data/nimblegen/2.1M_Human_Exome_Annotation/2.1M_Human_Exome.bed'\n bed_chr2st2end, bed_chr2posLs = bed_tools.load_bed(bed_file, \n 'NimbleGen Tiled Regions')\n # NimbleGen Tiled Regions\n # Target Regions\n\n use_data_dir = '/home/perry/Projects/loh/data/all_non_ref_hg18/'\n all_somatic = {}\n all_inherited = {}\n cancer_qualities = mutations.get_consensus_qualities(use_data_dir + 'yusanT.ann')\n normal_qualities = mutations.get_consensus_qualities(use_data_dir + 'yusanN.ann')\n for exome in global_settings.exome_types:\n data_file = use_data_dir + exome\n inherited, somatic, murim = mutations.get_mutations(data_file, normal_qualities,\n cancer_qualities, quality_cutoff,\n False, coverage_cutoff)\n # only use the bed_tools NimbleGen\n # restriction for hg18 data\n for s in somatic['yusan']: \n chr, pos = s.split(':')\n if bed_tools.find_location_in_bed(chr, int(pos), \n bed_chr2posLs,\n bed_chr2st2end):\n all_somatic[s] = True\n for i in inherited['yusan']: \n chr, pos = s.split(':')\n if bed_tools.find_location_in_bed(chr, int(pos), \n bed_chr2posLs,\n bed_chr2st2end):\n all_inherited[i] = True\n return (set(all_somatic.keys()) & set(get_murim_covered(quality_cutoff)), 
set(all_inherited.keys()) & set(get_murim_covered(quality_cutoff)))", "def summarizeReactorStats(self):\n totalMass = 0.0\n fissileMass = 0.0\n heavyMetalMass = 0.0\n totalVolume = 0.0\n numBlocks = len(self.getBlocks())\n for block in self.getBlocks():\n totalMass += block.getMass()\n fissileMass += block.getFissileMass()\n heavyMetalMass += block.getHMMass()\n totalVolume += block.getVolume()\n totalMass = totalMass * self.powerMultiplier / 1000.0\n fissileMass = fissileMass * self.powerMultiplier / 1000.0\n heavyMetalMass = heavyMetalMass * self.powerMultiplier / 1000.0\n totalVolume = totalVolume * self.powerMultiplier\n runLog.extra(\n \"Summary of {}\\n\".format(self)\n + tabulate.tabulate(\n [\n (\"Number of Blocks\", numBlocks),\n (\"Total Volume (cc)\", totalVolume),\n (\"Total Mass (kg)\", totalMass),\n (\"Fissile Mass (kg)\", fissileMass),\n (\"Heavy Metal Mass (kg)\", heavyMetalMass),\n ],\n tablefmt=\"armi\",\n )\n )", "def _Dedup(self):\n kegg_id_to_index = {}\n for i, c in enumerate(self.reactants):\n first_i = kegg_id_to_index.setdefault(c.compound.kegg_id, i)\n if i != first_i:\n self.reactants[first_i].coeff += c.coeff\n c.coeff = 0\n \n self.reactants = filter(lambda x: x.coeff != 0, self.reactants)\n \n # always make sure that H2O is the last reactant (so that it will\n # appear last in the chemical formula)\n i_h2o = self._FindCompoundIndex('C00001')\n if i_h2o is not None:\n self.reactants = self.reactants[:i_h2o] + \\\n self.reactants[(i_h2o + 1):] + \\\n [self.reactants[i_h2o]]", "def sngl_obj_evo(self, lacking):\n prob, algo = self.probinit('jde', 0)\n l = list()\n u = 6+(self.N-3)*4\n for i in range(lacking):\n archi = archipelago(algo,prob,8,16, topology=fully_connected())\n for j in range(u):\n archi.evolve(5)\n stdout.write(\"\\r{0} / {1}\".format(i*u+j+1, lacking*u))\n stdout.flush()\n tmp = [isl for isl in archi]\n tmp.sort(key = lambda x: x.population.champion.f[0]);\n l.append(tmp[0].population.champion)\n stdout.write(\" Done. 
\")\n return l, prob", "def lof_sig_scores(table, samples, verbose=True):\n mut_probdam = 'Missense:Probably'\n mut_syn = 'Synonymous'\n mut_trunc = ['Nonsense', 'Frameshift', 'Splice-site']\n mut_other = ['Missense:Benign', 'Missense:Possibly', 'MissenseNA', 'Indel']\n mut_all = [mut_probdam, mut_syn] + mut_trunc + mut_other\n\n # Calculate the global nonsynonymous:synonymous ratio ---------------------\n # Within each mutation category, sum counts (across all genes)\n tot_count_probdam = sum(table[mut_probdam])\n tot_count_syn = sum(table[mut_syn])\n tot_count_trunc = sum(itertools.chain(*(list(table[col])\n for col in mut_trunc)))\n tot_count_other = sum(itertools.chain(*(list(table[col])\n for col in mut_other)))\n\n # Global mutation count across all categories and genes (= 3504)\n tot_count_all = sum((tot_count_probdam, tot_count_syn, tot_count_trunc,\n tot_count_other))\n if verbose:\n print(\"Counted\", tot_count_all, \"mutations across\", len(table), \"genes\",\n \"and\", len(samples), \"samples\", file=sys.stderr)\n\n # Fraction of global mutations in each category of interest\n tot_frac_probdam = tot_count_probdam / tot_count_all\n tot_frac_syn = tot_count_syn / tot_count_all\n tot_frac_trunc = tot_count_trunc / tot_count_all\n\n # Global nonsynonymous:synonymous ratio = (1-syn)/syn (= 2.13697)\n tot_ns_s_ratio = (1 - tot_frac_syn) / tot_frac_syn\n\n # Calculate each gene's mutation score ------------------------------------\n for _idx, row in table.iterrows():\n gene_count_all = sum([row[col] for col in mut_all])\n if not gene_count_all:\n # Gene is not mutated at all --> zero score\n yield (row['Gene'], 0.0)\n continue\n\n # Initial score is the sum the 'Normalized' values across all samples\n raw_score = sum(row[sid] for sid in samples)\n\n # Adjust for NS:S ratio\n gene_count_syn = row[mut_syn]\n syn_factor = max(1 - tot_ns_s_ratio * gene_count_syn / gene_count_all,\n 0)\n new_score = raw_score * syn_factor\n\n # Adjust for \"probably damaging\" missense and truncating mutations\n gene_frac_probdam = row[mut_probdam] / gene_count_all\n probdam_factor = 1 + gene_frac_probdam - tot_frac_probdam\n gene_frac_trunc = sum([row[col] for col in mut_trunc]) / gene_count_all\n trunc_factor = gene_frac_trunc / tot_frac_trunc\n final_score = new_score * probdam_factor * trunc_factor\n yield (row['Gene'], final_score)", "def respond_show_mutation_data(self, content):\n gene_arg = content.gets('GENE')\n\n\n if not gene_arg:\n self.make_failure('MISSING_MECHANISM')\n\n gene_names = _get_term_names(gene_arg)\n if not gene_names:\n return self.make_failure('MISSING_MECHANISM')\n gene_name = gene_names[0]\n\n disease_arg = content.gets('DISEASE')\n if not disease_arg:\n return self.make_failure('MISSING_MECHANISM')\n\n disease_names = _get_term_names(disease_arg)\n if not disease_names:\n return self.make_failure('INVALID_DISEASE')\n\n disease_name = disease_names[0].replace(\"-\", \" \").lower()\n disease_abbr = self.BA.get_tcga_abbr(disease_name)\n if disease_abbr is None:\n return self.make_failure('INVALID_DISEASE')\n\n\n gene_list = []\n for gene_name in gene_names:\n gene_list.append(str(gene_name))\n\n oncoprint_data = self.BA.find_variants_for_genes_cbio(gene_list, disease_abbr, \"tcga\")\n\n self.send_display_oncoprint(oncoprint_data)\n\n reply = KQMLList('SUCCESS')\n\n reply.sets('oncoprint', 'SUCCESS' if len(oncoprint_data) > 0 else 'FAILURE')\n\n\n\n return reply", "def evaluate ( self , genome ) :\n\n\t\tassert isinstance( genome , Genome ), 'genome supplied must be of type 
cc3dtools.Genome!'\n\t\tloci = genome.get_mutated_loci()\n\t\tmatched_phenotypes = []\n\t\tphenotypes = self.phenotypes.items()\n\n\t\tfor locus in loci:\n\t\t\tfor phenotype, region in phenotypes:\n\t\t\t\t# check if the locus is in the region\n\t\t\t\t# 'locus.locus' to get the float value of that mutation rather \n\t\t\t\t# than an object!\n\t\t\t\tif locus.locus > region[0] and locus.locus < region[1]:\n\t\t\t\t\tmatched_phenotypes.append( phenotype )\n\t\treturn Counter( matched_phenotypes )", "def _get_mutation_amount(self):\n return self._get_sign() * self._get_number()", "def finalize():\n\n print(\"\"\"\n The script analysis/sedov_compare.py can be used to analyze these\n results. That will perform an average at constant radius and\n compare the radial profiles to the exact solution. Sample exact\n data is provided as analysis/cylindrical-sedov.out\n \"\"\")", "def mutations(self, mu):\n # make a copy of the data, and make it an integer\n new_alleles = np.copy(self.geno)\n\n # for an array of the same shape as newAlleles, draw mutations at each\n # position with probability mu.\n vals = np.random.binomial(1, mu, self.size * self.nloci * 2)\n mutate = np.reshape(vals, [ self.size, self.nloci, 2])\n mutate = (mutate == 1)\n # swap zeroes and ones.\n new_alleles[mutate] = 1 - new_alleles[mutate] \n\n # Apply to geno_probs\n new_geno_probs = calculate_geno_probs(new_alleles, mu=mu)\n\n output = genotypeArray(\n geno = new_alleles,\n geno_probs = new_geno_probs,\n names = self.names,\n mothers= self.mothers,\n fathers = self.fathers\n )\n\n return output", "def casdetude_genetics():\n file_path = PROJECT_PATH + \"/geographycal_data/Monterusciello/MontEdo_buildings\"\n router = Router(building_file=file_path)\n\n router.design_aqueduct(0)\n\n router.write2epanet(router.acqueduct, PROJECT_PATH + \"/Monterusciello_solution/Monterusciello_acqueduct\",\n diam=False)\n\n read_epanet = graphIO.graph_reader(router.acqueduct)\n read_epanet.read_epanet(PROJECT_PATH + \"/geographycal_data/SolvedNet/MonteSolution\")\n kpi_calculator(router.acqueduct)\n\n minimal = router.design_minimal_aqueduct(router.acqueduct, \"Q*H\")\n router.write2epanet(minimal, PROJECT_PATH + \"/Monterusciello_solution/Monterusciello_acqueduct\", diam=False)", "def get_summary_statistics(self):\n # Get log 10 total mutation count\n self.log_mut_count = np.log10(self.variant_df.shape[0])\n\n # Get the number of variants stratified by functional location of variant\n # E.g. Exon, Intron, 5'UTR, etc.\n self.functional_counts = pd.DataFrame(self.variant_df['Func.refGene'].value_counts())\n self.functional_counts.columns = [self.sample_name]\n \n # Get the number of variants stratified by exonic functional outcome of variant\n # E.g. 
Silent, Nonsense, Missense, etc.\n self.mutational_class_counts = (\n pd.DataFrame(self.variant_df['ExonicFunc.refGene'].value_counts())\n )\n self.mutational_class_counts.columns = [self.sample_name]\n \n # Get number of COSMIC curated events\n self.cosmic_variants = self.variant_df[self.variant_df['cosmic70'] != '.']\n self.cosmic_variants = self.cosmic_variants.assign(sample_name = self.sample_name,\n final_id = self.final_id)\n self.cosmic_variant_counts = self.cosmic_variants.shape[0]\n \n # Get depth summary\n self.depth_summary = pd.DataFrame(self.variant_df['depth'].astype(int).describe())\n self.depth_summary.columns = [self.sample_name]\n \n return self.functional_counts, self.mutational_class_counts, self.depth_summary", "def mutation(self, base_offsprings, model_features_count) :", "def determine_aa_change( self ):\n for k,v in self.obj_mi.hash_isoforms.iteritems(): #k = string that is isoform_id, v = Isoform instance\n obj_tt = self.create_transcript_instances( k )\n\n #METHOD 1: get the original codon & mutated codon\n # orig_codon = obj_tt.retrieve_containing_codon( self.snv_start, self.snv_strand )\n # i_genome_pos = obj_tt.arr_genome_pos.index( self.snv_start )\n # obj_tt.arr_nuc_seq[ i_genome_pos ] = self.base_alt\n # mut_codon = obj_tt.retrieve_containing_codon( self.snv_start, self.snv_strand )\n\n\n #METHOD 2: get the mutated codon\n full_pos = self.snv_chrom + ':' + str( self.snv_start ) + '-' + str( self.snv_end )\n hash_codon_info = obj_tt.get_mutated_codon( self.base_orig, self.base_alt, full_pos, self.snv_strand, True ) #output is hash -> {'codon_orig': codon_orig, 'codon_mut': codon_mut, 'aa_orig': aa_orig, 'aa_mut': aa_mut}\n\n\n\n ##TEST:: show the AA change based on mutation\n # print \"hash_codon_info: \"\n # print hash_codon_info\n\n # print \"gene strand & snv strand: \", obj_tt.iso_sj.strand, \" & \", self.snv_strand\n # print \"original base > mutated base: \", self.base_orig, \" > \", self.base_alt\n # print \"original codon > mutated codon: \", hash_codon_info['codon_orig'], \" > \", hash_codon_info['codon_mut']\n # print \"original AA > mutated AA: \", hash_codon_info['aa_orig'], \" > \", hash_codon_info['aa_mut']\n\n\n ##TEST:: determine consequence\n print \"GV_DAAC 1: \"\n obj_tt.alteration_consequence( self.base_alt, self.get_genomic_range(), self.snv_strand, self.alt_type )\n \n\n ##TEST METHOD - SEE WHAT STEPS I NEED TO PERFORM\n #TEST:: retrieve the original base & the mutated base\n # i_genome_pos = obj_tt.arr_genome_pos.index( self.snv_start )\n # orig_base = obj_tt.arr_nuc_seq[ i_genome_pos ]\n # print \"k = \", k, \" & i_genome_pos = \", i_genome_pos, \" | orig_base = \", orig_base, \" & double_check = \", self.base_orig, \" & iso_sj.strand = \", obj_tt.iso_sj.strand, \" & mut strand = \", self.snv_strand\n # hash_orig_codon = obj_tt.find_containing_codon( self.snv_start )\n # print \"hash_orig = \", hash_orig_codon\n # get_orig_codon = obj_tt.arr_nuc_seq[ hash_orig_codon['i_genome_start']:hash_orig_codon['i_genome_end'] + 1 ]\n # str_orig_codon = ''.join( get_orig_codon ) if obj_tt.iso_sj.strand > 0 else ''.join( get_orig_codon[::-1] )\n # print \"seq_orig = \", str_orig_codon, \" & type = \", type( get_orig_codon ), \" & rf = \", obj_tt.arr_rf[ hash_orig_codon['i_genome_start']:hash_orig_codon['i_genome_end'] + 1 ], \" & list_orig_codon = \", get_orig_codon\n\n # ##TEST:: make mutation\n # obj_tt.arr_nuc_seq[ i_genome_pos ] = self.base_alt\n # hash_mut_codon = obj_tt.find_containing_codon( self.snv_start )\n # print \"hash_muts = \", 
hash_mut_codon\n # get_mut_codon = obj_tt.arr_nuc_seq[ hash_mut_codon['i_genome_start']:hash_mut_codon['i_genome_end'] + 1 ]\n # str_mut_codon = ''.join( get_mut_codon ) if obj_tt.iso_sj.strand > 0 else ''.join( get_mut_codon[::-1] )\n # print \"seq_muts = \", str_mut_codon, \" & type = \", type( get_mut_codon ), \" & rf = \", obj_tt.arr_rf[ hash_mut_codon['i_genome_start']:hash_mut_codon['i_genome_end'] + 1 ], \" & list_mut_codon = \", get_mut_codon \n\n # ##TEST:: retrieve \n # print \"AA: from \", Seq( str_orig_codon ).translate( to_stop = False ), \">\", Seq( str_mut_codon ).translate( to_stop = False )\n\n # try:\n # i_genome_pos = obj_tt.arr_genome_pos.index( self.snv_start )\n # orig_base = obj_tt.arr_nuc_seq[ i_genome_pos ]\n # print \"k = \", k, \" & i_genome_pos = \", i_genome_pos, \" | orig_base = \", orig_base, \" & double_check = \", self.base_orig, \" & iso_sj.strand = \", obj_tt.iso_sj.strand, \" & mut strand = \", self.snv_strand\n # hash_orig_codon = obj_tt.find_containing_codon( self.snv_start )\n # print \"hash_orig = \", hash_orig_codon\n # get_orig_codon = obj_tt.arr_nuc_seq[ hash_orig_codon['i_genome_start']:hash_orig_codon['i_genome_end'] ]\n # print \"seq_orig = \", get_orig_codon\n\n # ##TEST:: make mutation\n # obj_tt.arr_nuc_seq[ i_genome_pos ] = self.base_alt\n # hash_mut_codon = obj_tt.find_containing_codon( self.snv_start )\n # print \"hash_muts = \", hash_mut_codon\n # get_mut_codon = obj_tt.arr_nuc_seq[ hash_mut_codon['i_genome_start']:hash_mut_codon['i_genome_end'] ]\n # print \"seq_muts = \", get_mut_codon \n\n # ##TEST:: retrieve \n # print \"AA: from \", Seq( orig_codon ).translate( to_stop = False ), \">\", Seq( mut_codon ).translate( to_stop = False )\n # except:\n # print \"ERROR:: for \", k, \", position does not exist: \", self.snv_start\n # continue\n\n print \"////////////////////\\n\"", "def others():\n\n # Fuel cells ('FC') were not calculated and assigned heat rates\n # These sum up to 63 MW of capacity in WECC\n # Cleanest option is to remove them from the current runs:\n query = \"CREATE TABLE switch.fuel_cell_generation_plant_backup (like generation_plant);\\\n INSERT INTO fuel_cell_generation_plants\\\n (SELECT * FROM generation_plant WHERE gen_tech = 'FC');\\\n DELETE FROM generation_plant_scenario_member gpsm USING generation_plant gp\\\n WHERE gp.generation_plant_id = gpsm.generation_plant_id\\\n AND gen_tech = 'FC';\\\n DELETE FROM generation_plant_cost gpc USING generation_plant gp\\\n WHERE gp.generation_plant_id = gpc.generation_plant_id\\\n AND gen_tech = 'FC';\\\n DELETE FROM generation_plant_existing_and_planned gpep USING generation_plant gp\\\n WHERE gp.generation_plant_id = gpep.generation_plant_id\\\n AND gen_tech = 'FC';\\\n DELETE FROM generation_plant WHERE gen_tech = 'FC';\"\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)\n\n # Others ('OT') also do not have an assigned heat rate. 
Assign an average.\n query = \"UPDATE generation_plant set full_load_heat_rate = \\\n (select avg(full_load_heat_rate)\\\n from generation_plant\\\n join generation_plant_scenario_member using (generation_plant_id)\\\n where energy_source = 'Gas'\\\n and generation_plant_scenario_id = 2)\\\n where gen_tech = 'OT' and energy_source = 'Gas'\"\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)\n\n # Replace 'NaN's with 'Null's\n # (NaNs result from the aggregation process)\n cols_to_replace_nans = ['connect_cost_per_mw','hydro_efficiency','min_build_capacity',\n 'unit_size','storage_efficiency','store_to_release_ratio',\n 'min_load_fraction','startup_fuel','startup_om',\n 'ccs_capture_efficiency', 'ccs_energy_load']\n for col in cols_to_replace_nans:\n query = \"UPDATE generation_plant SET {c} = Null WHERE {c} = 'NaN'\".format(c=col)\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)\n print \"Replaced NaNs in column '{}'\".format(col)\n\n # Replace Nulls with zeros where Switch expects a number\n query = \"UPDATE generation_plant\\\n SET connect_cost_per_mw = 0.0\\\n WHERE connect_cost_per_mw is Null\"\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)", "def test_population_bioequivalence():\n\n # See 10.3 Example from Chow et al.\n h = Population(l=-0.2966, stdev_11=0.2, stdev_tt=math.sqrt(0.17),\n stdev_tr=math.sqrt(0.17), stdev_bt=0.4, stdev_br=0.4,\n rho=0.75, alpha=0.05, power=0.8)\n h.calculate()\n assert h.n == 12", "def analyse ( self ) :\n \n ## get all B0 particles\n bs1 = self.gselect ( 'bs1' , \"[ Beauty => ( D_s+ ==> K- K+ pi+ ) K-]CC \")\n bs2 = self.gselect ( 'bs2' , \"[ Beauty -> ( D_s+ --> K- K+ pi+ ) K-]CC \")\n \n cnt = self.counter(\"#1 + photos \")\n cnt += bs1.size()\n \n cnt = self.counter(\"#2 - photos \")\n cnt += bs2.size()\n\n if len(bs1) != len(bs2) :\n self.Warning(\" FOUND!!!!\" , SUCCESS )\n for b in bs1:\n print ' With PHOTOS: ', b.decay() , b.barcode()\n for b in bs2:\n print ' Without PHOTOS: ', b.decay() , b.barcode()\n \n \n return SUCCESS # RETURN ", "def Problem10():\n return 'When yield strength in tension and compression are not equal'", "def print_statistics(self):\n print 'Ran %s iterations in %0.3f seconds\\n' % (\n self.iterations, self.elapsed_time)\n\n print 'Overall Equity'\n for index in range(len(self.holdem_ranges)):\n range_short_form = '%r' % self.holdem_ranges[index]\n print 'P%s) %-15s %0.3f' % (\n index,\n range_short_form,\n float(self.win_stats.get(index, 0))/self.iterations)\n print '\\n'\n print 'Hand distribution for each player'\n for stats in self.player_stats:\n stats.print_report()", "def mutations(self):\n for site in self.sites():\n yield from site.mutations", "def mutations(self):\n for site in self.sites():\n yield from site.mutations", "def check_mixture_health(self):\n h = HealthDict()\n h['mole_fraction_too_low'] = []\n h['mole_fraction_too_high'] = []\n conc = mole_summation(phase=self)\n lo = np.where(conc < 1.0)[0]\n hi = np.where(conc > 1.0)[0]\n if len(lo) > 0:\n h['mole_fraction_too_low'] = lo\n if len(hi) > 0:\n h['mole_fraction_too_high'] = hi\n return h", "def print_output(self):\n print(\"Reference score: \" + str(self.PotTax_reference.sum().TFI))\n print(\"Intervention score: \" + str(self.PotTax_intervention.sum().TFI))\n return" ]
[ "0.5473835", "0.54621196", "0.5410071", "0.5340635", "0.53035814", "0.53019184", "0.5292105", "0.5210743", "0.5179937", "0.5177363", "0.51658905", "0.5165434", "0.51492155", "0.51443344", "0.5134806", "0.5133196", "0.51056236", "0.5080089", "0.5078701", "0.50761664", "0.5064717", "0.50360346", "0.49886078", "0.4984535", "0.49828735", "0.49786213", "0.49703494", "0.49703494", "0.49648368", "0.49453676" ]
0.54736185
1
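A tiny standalone sketch of the phasing test at the heart of the function above: two heterozygous sites only form a candidate compound het when their alternate alleles sit on opposite haplotypes. The genotype strings and function name below are made up for illustration and are not part of the original tool.

def on_opposite_haplotypes(gt1, alt1, gt2, alt2):
    # phased genotypes look like "A|G"; the index of the ALT allele is its haplotype
    hap1 = gt1.split('|').index(alt1)
    hap2 = gt2.split('|').index(alt2)
    return hap1 != hap2

print(on_opposite_haplotypes("A|G", "G", "C|T", "C"))  # True  -> report as compound het
print(on_opposite_haplotypes("A|G", "G", "C|T", "T"))  # False -> same haplotype, skip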
Install the app based on the source and the destination given. It makes sure that the destination folder does not exist. It then runs the checks to see if the installation is completed.
def install(self):
    if self.__checkDestination():
        if self.__ui:
            self.__ui.progressText.append('Destination found, removing it...')
        self.__removeFiles(self.__targetPath, verbose=True)

    if self.__ui:
        self.__ui.progressText.append('Copying files...')
    self.__copyFiles()

    if self.__ui:
        self.__ui.progressText.append('Runnign checks...')
    if not self.__runChecks():
        raise InstallationError("The installation has fail, it did not pass one of the checks")

    # Clean the tmp folder by removing the source
    self.__removeFiles(self.__sourcePath)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pre_install(self, dest_dir):\n pass", "def install(src, dst):\n try:\n dst = os.path.join(install_dir, dst, os.path.basename(src))\n src = os.path.join(source_dir, src)\n assert os.path.isfile(src)\n assert not os.path.isdir(dst)\n if not os.path.isdir(os.path.dirname(dst)):\n os.makedirs(os.path.dirname(dst))\n shutil.copy(src, dst)\n print 'Installed', dst\n except Exception:\n print 'Could not install', dst", "def install(src, dest):\n shutil.move(src, dest)\n restorecon(dest, recursive=True)", "def install_apps(self, app_installers):\n print('[?] Installing missing APK(s) and IPA(s).')\n for app_installer in app_installers:\n with request.urlopen(app_installer[1]) as response, open(app_installer[0], 'wb') as out_app_file:\n if response.getcode() != 200:\n print(f'[-] Failed to install {app_installer[1]}.')\n return\n print(f'[+] Successfully installed {app_installer[1]}.')\n shutil.copyfileobj(response, out_app_file)", "def install_from_application(self, cfgFile, toLocation):\n fullPathFrom = os.path.join(self._ctx['BUILD_DIR'], cfgFile)\n if os.path.exists(fullPathFrom) and os.path.isfile(fullPathFrom):\n fullPathTo = os.path.join(self._ctx['BUILD_DIR'], toLocation)\n self._safe_makedirs(os.path.dirname(fullPathTo))\n shutil.copy(fullPathFrom, fullPathTo)", "def post_install(self, dest_dir):\n raise NotImplementedError(\"post_install is not implemented\")", "def _deploy_app():\n rsync_project(env.remote_directory, env.local_directory,\n exclude=['.git/', '*.pyc', 'tests.py', 'migrations/'])\n sudo('service installer_app restart')", "def install(src, perm, dest, cmds, comp, verbose=False):\n if comp == Cmp.nosrc:\n ansiprint(\n f\"The source file '{src}' does not exist.\", fg=Color.black, bg=Color.red\n )\n elif comp == Cmp.same:\n return\n try:\n if os.path.exists(dest):\n os.chmod(dest, stat.S_IRUSR | stat.S_IWUSR)\n copyfile(src, dest)\n os.chmod(dest, perm)\n if cmds and subprocess.call(cmds) != 0:\n ansiprint(f\"Post-install commands for {dest} failed.\", fg=Color.red)\n except Exception as e:\n ansiprint(f\"Installing '{src}' as '{dest}' failed: {e}\", fg=Color.red)\n return\n ansiprint(f\"File '{src}' was successfully installed as '{dest}'.\", fg=Color.green)", "def alock_installFunc(dest, source, env):\n\n owner = env.get('INSTALL_OWNER', None)\n if owner:\n try:\n uid = pwd.getpwnam(owner)[2]\n except TypeError:\n uid = owner\n else:\n uid = -1\n\n group = env.get('INSTALL_GROUP', None)\n if group:\n try:\n gid = grp.getgrnam(group)[2]\n except TypeError:\n gid = group\n else:\n gid = -1\n\n mode = env.get('INSTALL_MODE', None)\n if not mode:\n st = os.stat(source)\n mode = (stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)\n if isinstance(mode, str):\n mode = int(mode, 8)\n\n shutil.copy2(source, dest)\n\n if owner or group:\n os.chown(dest, uid, gid)\n\n os.chmod(dest, mode)\n return 0", "def _install(self):\n\n local_source_path = join(BespokeGlobals.ABS_LOCAL_TOOLS,\n self._tool.install_properties['source_path'])\n\n remote_target_path = self._tool.install_properties['target_path']\n\n if isdir(local_source_path):\n self._staf_dir_copy(local_source_path, remote_target_path)\n elif isfile(local_source_path):\n self._staf_file_copy(local_source_path, remote_target_path)\n else:\n raise CoreError('Failed to stage tool \"{0}\" on remote machine! 
The file/directory '\n '\"{1}\" does not exist!'.format(self._tool.name, local_source_path))", "def install_or_update_source():\n new_install = install_source()\n if not new_install:\n update_source()", "def install(self, options=None):\n if not BuildSystemBase.install(self):\n return False\n\n if not self.subinfo.options.useShadowBuild:\n self.enterSourceDir()\n else:\n self.enterBuildDir()\n if options != None:\n command = f\"{self.makeProgram} {options}\"\n else:\n command = f\"{self.makeProgram} install\"\n\n return self.system(command)", "def bundle(self):\n\n try:\n self.build_directory.mkdir(parents=True)\n except FileExistsError:\n logger.warning('Directory already exists: %s', self.build_directory)\n decision = input(\n f'{self.build_directory} already exists. Overwrite? Y/[N]: '\n )\n if decision.strip().upper() == 'Y':\n logger.info('Deleting old build directory: %s', self.build_directory)\n shutil.rmtree(self.build_directory)\n self.build_directory.mkdir(parents=True)\n else:\n return\n\n with cd(self.app_directory):\n self._install_dependencies()\n self._handle_supplemental_data()\n self._cleanup_files()\n if self.make_zip:\n self._zip_files()", "def _prepare_relocate(self):\n\n if osp.exists(self.pkg_dest_dir):\n _remove(self.pkg_dest_dir)\n os.makedirs(self.pkg_dest_dir)\n self.msg.debug('pkg-install-dir: %s', self.pkg_install_dir)\n self.msg.debug('pkg-dest-dir: %s', self.pkg_dest_dir)\n return", "def install(c, dest):\n for sp_ns in ns_foreach_task_subdir(c):\n try:\n sp_ns.tasks.install(c, dest)\n except UnexpectedExit:\n pass", "def install(self,name,destFiles,data_sizeCrcDate,progress=None):\n pass", "def install():\n execute(generate)\n execute(upload)", "def install(self) -> None:\n if self.local_packages:\n self.prepare_install_local()\n self.install_local()\n if self.remote_packages:\n self.install_from_url()\n if self.repository_packages:\n self.install_from_repository()\n if self.debuginfo_packages:\n self.install_debuginfo()", "def install_step(self):\n\n cmd = \"./INSTALL -noroot -silent -install_dir %s\" % self.installdir\n run_cmd(cmd, log_all=True, simple=True)\n\n adjust_permissions(self.installdir, stat.S_IWOTH, add=False)", "def action_install(args):\n\n dest = Path(args.dest)\n\n module_root = Path(\"modules/\")\n modules = load_modules(module_root)\n\n try:\n candidates = {modules[target] for target in args.targets}\n dependencies = set()\n for candidate in candidates:\n dependencies |= set(candidate.resolve_dependencies(modules))\n candidates |= dependencies\n except KeyError as e:\n key = e.args[0]\n print(f\"{key} module not found\")\n sys.exit(1)\n\n print(f\"Will install: {', '.join(c.name for c in candidates)}\")\n if not confirm(\"install?\", default=True):\n return\n\n for mod in candidates:\n print(f\"Installing {mod.name}...\")\n mod.install(dest)", "async def install(self) -> None:\n self.add_to_output(\"Starting install, please wait...\")\n # outsource installer process\n proc = await asyncio.create_subprocess_shell(\n f'{os.path.join(os.getcwd(), \"files\", \"bos-toolbox\", \"bos-toolbox.bat\")} install {self.ip} --pool-user UpstreamDataInc.test --no-keep-pools --psu-power-limit 900 --no-nand-backup --feeds-url file:./feeds/',\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.PIPE)\n # get stdout of the install\n while True:\n # stderr = await proc.stderr.readuntil(b'\\r')\n stdout = await proc.stderr.readuntil(b'\\r')\n if stdout == b'':\n break\n # self.add_to_output(stderr.decode(\"utf-8\").strip())\n 
self.add_to_output(stdout.decode(\"utf-8\").strip())\n self.add_to_output(\"Rebooting...\")\n await proc.wait()\n self.add_to_output(\"Install complete...\")\n while not await self.ping_http():\n await asyncio.sleep(3)\n await asyncio.sleep(5)", "def deploy():\n build()\n copy()\n install()", "def ExecuteIf(self, args, src_files, dst_files):\n if self.ShouldBuild(src_files, dst_files):\n self.MakeDestinationDirectories(dst_files)\n self.Execute(args)\n if self.execute and not self.VerifyExists(dst_files):\n raise RuntimeError(\"FAILED: build did not create all required files\")", "def _install():\n\tprint \"Preparing to install {} script.\".format(SCRIPT_NAME)\n\t\n\t#make sure there is a place to install the script to.\n\tif not \"SCRIPTS\" in os.environ:\n\t\tprint \"Please set SCRIPTS environment variable.\"\n\t\tsys.exit(1)\n\t\n\tscript_dir = os.environ[\"SCRIPTS\"]\n\t\n\t#check to see if already installed\n\tif _is_already_installed(script_dir):\n\t\tprint \"A version of {} is already installed.\".format(SCRIPT_NAME)\n\t\tprint \"Do you wish to overwrite it? [Y,n]\"\n\t\tif raw_input() != 'Y':\n\t\t\tprint \"Cancelling installation of {}.\".format(SCRIPT_NAME)\n\t\t\tsys.exit(0)\n\t\telse:\n\t\t\tprint \"Overwritting previously installed script {}.\".format(SCRIPT_NAME)\n\t\t\t_uninstall()\n\t\n\t#copy python sources into script directory\n\tnew_dir = os.path.join(script_dir, SCRIPT_NAME)\n\tshutil.copytree(\"src\", new_dir)\n\t\n\t#copy executable and add permissions\n\tfor name in EXEC_NAMES:\n\t\tos.system(\"sudo cp bin/{0} /bin/{0}\".format(name))\n\t\tos.system(\"sudo chmod +x /bin/{}\".format(name))", "def do_install(self, args):\n if args:\n try:\n plugin_name, file_path = args.split()[0], args.split()[1]\n except Exception as e:\n return print(display_messages(\"the argument is invalid please type ?install for more information\", error=True))\n if not path.isfile(file_path):\n return print(\n display_messages(\n \"the file {} not found \".format(file_path), error=True\n )\n )\n head, tail = os.path.split(file_path)\n dest = copyfile(file_path, \"{}/{}\".format(self.temp_path, tail))\n print(display_messages(\"copy content file .zip to {}\".format(dest), info=True))\n \n path_to_zip_file = tempfile.gettempdir() + \"/{}\".format(tail)\n with ZipFile(path_to_zip_file, \"r\") as zip_ref:\n zip_ref.extractall(tempfile.gettempdir())\n temp_path_file_extracted = \"{}/{}.py\".format(self.temp_path, plugin_name)\n print(\n display_messages(\n \"extracted files on : {}\".format(temp_path_file_extracted), info=True\n )\n )\n if not path.isfile(temp_path_file_extracted):\n return print(\n display_messages(\n \"the file {} not found \".format(temp_path_file_extracted), error=True\n )\n )\n temp_templates_path = \"{}/{}\".format(self.temp_path, plugin_name)\n if not path.isdir(temp_templates_path):\n return print(\n display_messages(\n \"the directory template {} not found \".format(temp_templates_path), error=True\n )\n )\n source = temp_path_file_extracted\n destination = \"{}/{}.py\".format(self.captiveflask_setup_path, plugin_name)\n dest = copyfile(source, destination)\n print(display_messages(\"copy content file to {}\".format(dest), info=True))\n\n copy_tree(\n temp_templates_path, C.user_config_dir + \"/config/templates/{}\".format(plugin_name)\n )\n print(\n display_messages(\n \"plugin {} install {}\".format( plugin_name,setcolor(\"sucessful\", color=\"green\")),\n info=True,\n )\n )\n return \n print(\n display_messages(\"unknown command: {} \".format(args), error=True)\n )", 
"def install_step(self):\n\n tmpdir = os.path.join(self.builddir, 'tmp')\n stagedir = os.path.join(self.builddir, 'staged')\n try:\n os.chdir(self.builddir)\n os.makedirs(tmpdir)\n os.makedirs(stagedir)\n except OSError as err:\n raise EasyBuildError(\"Failed to prepare for installation: %s\", err)\n\n env.setvar('IATEMPDIR', tmpdir)\n dst = os.path.join(self.builddir, self.src[0]['name'])\n\n cmd = \"%s -i console\" % dst\n\n qanda = {\n \"PRESS <ENTER> TO CONTINUE:\": '',\n 'Press Enter to continue viewing the license agreement, or enter' \\\n ' \"1\" to accept the agreement, \"2\" to decline it, \"3\" to print it,' \\\n ' or \"99\" to go back to the previous screen.:': '1',\n 'ENTER AN ABSOLUTE PATH, OR PRESS <ENTER> TO ACCEPT THE DEFAULT :': self.installdir,\n 'IS THIS CORRECT? (Y/N):': 'y',\n 'PRESS <ENTER> TO INSTALL:': '',\n \"PRESS <ENTER> TO EXIT THE INSTALLER:\": '',\n \"CHOOSE LOCALE BY NUMBER:\": '',\n \"Choose Instance Management Option:\": '',\n }\n noqanda = [r'Installing\\.\\.\\..*\\n.*------.*\\n\\n.*============.*\\n.*$']\n\n run_cmd_qa(cmd, qanda, no_qa=noqanda, log_all=True, simple=True)\n\n try:\n os.chmod(self.installdir, stat.S_IRWXU | stat.S_IXOTH | stat.S_IXGRP | stat.S_IROTH | stat.S_IRGRP)\n except OSError as err:\n raise EasyBuildError(\"Can't set permissions on %s: %s\", self.installdir, err)", "def install(args):\n scripts = get_console_scripts(args.package)\n for script in scripts:\n src = os.path.join(args.source, script)\n dest = os.path.join(args.destination, script)\n logger.info('symlinking {1} to {0}'.format(dest, src))\n force_symlink(src, dest)", "def install(self,archive,destFiles,data_sizeCrcDate,progress=None):\n raise AbstractError", "def _install(self):\n\n pass", "def install():\n deploy()\n configure()" ]
[ "0.6939311", "0.67621106", "0.66896224", "0.63416165", "0.6273808", "0.62601715", "0.6225733", "0.60879886", "0.6076069", "0.60691816", "0.60395724", "0.5985055", "0.59724474", "0.59244055", "0.5914381", "0.5912949", "0.5879463", "0.586991", "0.5838821", "0.5827504", "0.58250856", "0.5796179", "0.5793242", "0.57922024", "0.57920104", "0.5790878", "0.5784673", "0.5753459", "0.56992865", "0.5680049" ]
0.750132
0
Copy the files from the source to the destination. At this point it assumes that the destination folder does not exist.
def __copyFiles(self):
    if os.path.isdir(self.__sourcePath):
        shutil.copytree(self.__sourcePath, self.__targetPath)
    else:
        shutil.copy2(self.__sourcePath, self.__targetPath)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def copy_files(source,destination):\r\n filelist = os.listdir(source)\r\n \r\n if not os.path.exists(destination):\r\n os.mkdir(destination)\r\n \r\n for filename in filelist:\r\n source_file = os.path.join(source,filename)\r\n shutil.copy(source_file,destination)", "def _copy_if_not_exists(source: pl.Path, destination: pl.Path) -> None:\n if destination.is_dir():\n destination_file = destination / source.name\n else:\n destination_file = destination\n if not destination_file.exists():\n su.copy(source, destination)", "def copy(self):\n source = os.path.abspath(self.path)\n destination = os.path.abspath(self.target)\n\n logger.info(\"Running Copy Method - SOURCE=\\\"{src}\\\" DESTINATION=\\\"{dst}\\\" IGNORE=\\\"{ignore}\\\"\".format(src=source, dst=destination, ignore=self.ignore))\n\n if not os.path.exists(source):\n logger.error(\"\\\"{source}\\\" PATH DOESN'T EXIST. PROGRAM TERMINATED. Please check log file.\".format(source=source))\n\n if self.rules is not None:\n files = self.rules\n else:\n self.create_packet_structure(source)\n files = self.files\n\n for (k,v) in files.items():\n src = os.path.join(source,k)\n dst = os.path.join(destination,v)\n dirpath = os.path.dirname(dst)\n if not os.path.isdir(dirpath):\n logger.info(\"Create directory - \\\"{dst}\\\"\".format(dst=dirpath))\n os.makedirs(dirpath)\n logger.info(\"copy from \\\"{f}\\\" to \\\"{t}\\\"\".format(f=src,t=dst))\n shutil.copyfile(src,dst)\n logger.info(\"OK\")", "def copy_files(self, source, target):\n\n if source == target and is_local(self.borrowed_ctx.host):\n logger.warning(\"IGNORE self-node: {}\".format(self.borrowed_ctx.host))\n return\n\n try:\n for item in os.listdir(source):\n if os.path.isfile(os.path.join(source, item)):\n logger.debug(\n \"processing {} --> {}\".format(\n os.path.join(source, item), self.borrowed_ctx.host\n )\n )\n self._sftp_channel.put(\n os.path.join(source, item), \"%s/%s\" % (target, item)\n )\n else:\n self.mkdir(\"%s/%s\" % (target, item), ignore_existing=True)\n self.copy_files(\n os.path.join(source, item), \"%s/%s\" % (target, item)\n )\n except Exception as e:\n logger.warning(\n \"Error of processing target = ({}:{}), for reason: {}\".format(\n self.borrowed_ctx.host, self.borrowed_ctx.port, e,\n )\n )\n exit(0)", "def copy_supported_files(self):\n\n try:\n for directory in self.config.DIRS_TO_COPY:\n shutil.copytree(self.dir_helper.publication_path + directory,\n self.temp_dir + directory)\n except Exception, ex:\n print '[e] exception {}'.format(str(ex))\n print '[i] looks like to folder existing that are scheduled for copying'\n\n for file_ in self.config.FILES_TO_COPY:\n index = file_.rfind('\\\\')\n dest_file = file_\n if index != -1:\n dest_file = file_[index+1:]\n\n try:\n shutil.copy2(self.dir_helper.publication_path + file_,\n self.temp_dir + dest_file)\n except Exception, ex:\n print '[e] exception {}'.format(str(ex))\n print '[i] file \"{}\" was not copied'.format(self.dir_helper.publication_path + file_)", "def copy(source, destination):\r\n\r\n source_ = os.path.abspath(os.path.expanduser(source))\r\n destination_ = os.path.abspath(os.path.expanduser(destination))\r\n\r\n if not os.path.exists(destination_) and not os.path.isfile(source_):\r\n os.makedirs(destination_)\r\n\r\n def recurse(source, destination):\r\n for entry in os.listdir(source):\r\n entry_path = os.path.join(source, entry)\r\n if os.path.isdir(entry_path):\r\n entry_dest = os.path.join(destination, entry)\r\n if os.path.exists(entry_dest):\r\n if not os.path.isdir(entry_dest):\r\n raise 
IOError('Failed to copy {0} a directory.'\r\n .format(entry_dest))\r\n recurse(entry_path, entry_dest)\r\n else:\r\n shutil.copytree(entry_path, entry_dest)\r\n else:\r\n shutil.copy2(entry_path, destination)\r\n\r\n\r\n if os.path.isdir(source_):\r\n recurse(source_, destination_)\r\n\r\n elif os.path.isfile(source_):\r\n dest_dir = os.path.dirname(destination_)\r\n if not os.path.exists(dest_dir):\r\n os.makedirs(dest_dir)\r\n shutil.copy2(source_, destination_)\r\n logger.info('copying %s to %s' % (source_, destination_))\r\n else:\r\n logger.warning('skipped copy %s to %s' % (source_, destination_))", "def copy(self, destination):\n destination = Path(destination)\n src_base = str(self.directory)\n if self.flatten:\n dst_base = destination\n else:\n dst_base = Path(destination.joinpath(self.directory.stem))\n\n for src in self.locations_to_copy:\n if src.is_dir():\n for dir_path, dir_names, file_names in os.walk(str(src)):\n if self.flatten:\n dst_dir = dst_base\n else:\n dst_dir = Path(dir_path.replace(src_base, str(dst_base)))\n if not dst_dir.exists():\n dst_dir.mkdir(parents=True)\n for file in file_names:\n shutil.copy2(os.path.join(dir_path, file), str(dst_dir))\n else:\n if self.flatten:\n dst_dir = dst_base\n else:\n dst_dir = Path(str(src.parent).replace(src_base, str(dst_base)))\n if not dst_dir.exists():\n dst_dir.mkdir(parents=True)\n shutil.copy2(str(src), str(dst_dir))", "async def _copy_folder_files(self, src_dir, dest_dir):\n for dir_item in os.listdir(src_dir):\n src_path = os.path.join(src_dir, dir_item)\n if os.path.isfile(src_path):\n await self._copy_file_with_hook(dir_item, src_path, os.path.join(dest_dir, dir_item))", "def copyFile(source,destination):\r\n logging.info(\"source\",source)\r\n logging.info(\"destination\",destination)\r\n try:\r\n shutil.copy(source, destination)\r\n logging.info(\"File copied successfully.\")\r\n \"\"\"If source and destination are same\"\"\"\r\n except shutil.SameFileError:\r\n logging.info(\"File not copied sucessfuly.\")\r\n \"\"\"List files and directories\"\"\"\r\n logging.info(\"After copying file:\")\r\n logging.info(os.listdir(destination))\r\n \"\"\"logging.info path of newly\r\n created file\"\"\"\r\n logging.info(\"Destination path:\", destination)", "def fresh_copy_dir(source_path, target_path):\n os.mkdir(target_path)\n for item in os.listdir(source_path):\n s = os.path.join(source_path, item)\n t = os.path.join(target_path, item)\n if os.path.isdir(s):\n fresh_copy_dir(s, t)\n else:\n shutil.copyfile(s, t)", "def copy_files(self):\n for (source_name, target_name) in self.FILES_TO_LINK:\n src = os.path.expanduser(source_name)\n tgt = os.path.expanduser(target_name)\n cmd = 'cp -rf {src} {tgt}'.format(src=src, tgt=tgt)\n\n print(cmd)\n if not self.dry_run:\n run(cmd)", "def copy_files(src_path, dst_path):\r\n for folder in os.listdir(src_path):\r\n for file in os.listdir(os.path.join(src_path, folder)):\r\n source = os.path.join(os.path.join(src_path, folder), file)\r\n dest = os.path.join(dst_path, file)\r\n shutil.copy(source, dest)", "def copy(source_path, skip_existing=True):\n\n dest_path = source_path.replace(source_dir.strip('/'), dest_dir.strip('/'))\n\n # Skip if dest file already exists\n if skip_existing and os.path.exists(dest_path):\n return\n\n # Create directory if necessary\n os.makedirs(os.path.dirname(dest_path), exist_ok=True)\n\n copyfile(source_path, dest_path)", "def transfer_files_from_dir_copy(\n source_filepath_list: Iterable[str],\n destination_filepath_list: Iterable[str],\n force_overwrite: 
bool = False,\n delete_source: bool = False\n) -> None:\n hide_progress_bar = logger.getEffectiveLevel() > logging.INFO\n for src, dst in tqdm(zip(source_filepath_list, destination_filepath_list), disable=hide_progress_bar):\n os.makedirs(path.dirname(dst), exist_ok=True)\n if force_overwrite and path.lexists(dst):\n os.remove(dst)\n if delete_source:\n shutil.move(src, dst)\n else:\n shutil.copyfile(src, dst)", "def _copy_file ( self, source, dest ):\n return", "def _copy_dir(src, dst):\n if os.path.isdir(src):\n os.makedirs(dst, exist_ok=True)\n for item in os.listdir(src):\n s = os.path.join(src, item)\n d = os.path.join(dst, item)\n\n if os.path.isdir(s):\n _copy_dir(s, d)\n else:\n shutil.copy2(s, d)\n\n else:\n os.makedirs(os.path.dirname(dst), exist_ok=True)\n _delete_file(dst)\n shutil.copy2(src, dst)", "def copy_directory(source, dest):\n for path, dirs, files in walk(source):\n relative_src_path = path.replace(source, \"\").lstrip(\"/\")\n abs_dest_path = join(dest, relative_src_path)\n if not exists(abs_dest_path):\n makedirs(abs_dest_path)\n for tdir in dirs:\n dest_dir = join(abs_dest_path, tdir)\n if not exists(dest_dir):\n makedirs(dest_dir)\n for tfile in files:\n src_file = join(path, tfile)\n dest_file = join(abs_dest_path, tfile)\n if islink(src_file):\n linkto = readlink(src_file)\n symlink(linkto, dest_file)\n continue\n else:\n process_file(src_file, dest_file)", "def copyanything(src, dst):\n try:\n copytree(src, dst, dirs_exist_ok=True)\n except FileExistsError as e: # noqa\n pass\n except OSError as err:\n # TODO(dittrich): This causes a pylint error\n # Not sure what test cases would trigger this, or best fix.\n if err.errno == os.errno.ENOTDIR: # type: ignore\n copy(src, dst)\n else:\n raise\n finally:\n remove_other_perms(dst)", "def copy_folder(source, destination):\n\n try:\n shutil.copytree(source, destination)\n except (OSError, IOError):\n return False\n else:\n return True", "def copyDir(src, dst, includes, excludes = []):\n\tmultiFilesReplacements([], dst, src, includes, excludes)", "def copy(source, destination):\n if os.path.isdir(source):\n return __copytree(source, destination)\n else:\n return __copyfile2(source, destination)", "def copy_folder(src: str, dest: str) -> None:\n\tuux.show_info(\"Copying folder \" + src + \" => \" + dest)\n\n\tif not os.path.exists(src):\n\t\tuux.show_error(\"Unable to copy, '\" + src + \"' does not exist.\")\n\t\treturn\n\n\tmkdir(dest)\n\n\tfor fn in os.listdir(src):\n\t\tif os.path.isfile(src + fn):\n\t\t\ttry:\n\t\t\t\tcopy_file(src + fn, dest)\n\t\t\texcept IOError as ex:\n\t\t\t\tuux.show_error(\"Failed to copy file, \" + os.strerror(ex.errno))", "def conditional_copy(copy_tuples):\n for source_file, destination_file in copy_tuples:\n # If the root_directory and destination file contents are the same, don't perform unnecessary file I/O\n if not destination_file.exists() or not filecmp.cmp(source_file, destination_file, shallow=False):\n destination_file.parent.mkdir(parents=True, exist_ok=True)\n shutil.copyfile(source_file, destination_file)", "def replicate(self, source):\n names = [\n name for name in os.listdir(source)\n if not name.startswith('.')\n ]\n\n # Filter out directories and copy files\n for name in names:\n src = os.path.abspath(os.path.join(source, name))\n dst = os.path.abspath(os.path.join(self.target, name))\n\n if os.path.isfile(src):\n shutil.copy(src, dst)", "def copy_files(file: str, destination: str):\n\n try:\n result = _process_files(\"cp\", \"-vp\", file, destination)\n except 
FileNotFoundError:\n print(\"ERROR: '{}' does not exist.\".format(file))\n except FolderNotFoundError:\n print(\"ERROR: '{}' destination does not exist.\".format(\n destination)\n )\n except InsufficientRightsError:\n print(\"ERROR: Insufficient rights to destination '{}'.\".format(\n destination)\n )\n else:\n print(result)", "def __copyfile(source, destination):\n logger.info(\"copyfile: %s -> %s\" % (source, destination))\n try:\n __create_destdir(destination)\n shutil.copy(source, destination)\n return True\n except Exception as e:\n logger.error(\n \"copyfile: %s -> %s failed! Error: %s\", source, destination, e\n )\n return False", "def copy(self, src_path: str, tgt_path: str) -> None:", "def dir_copy(self, path, dest, recursive=True):\n if j.sal.fs.exists(path):\n self.dir_copy_from_local(path, dest, recursive=recursive)\n else:\n self.dir_copy_from_bcdbfs(path, dest, recursive=recursive)", "def main(source_dir, dest_dir):\n\n paths = []\n for root, _, files in os.walk(source_dir):\n paths.extend([os.path.join(root, f) for f in files])\n\n def copy(source_path, skip_existing=True):\n \"\"\"Copies a file from source_path to source_path with\n source_dir replaced by dest_dir.\n\n Arguments:\n source_path(str): Path to a file to be copied.\n skip_existing(bool): True to skip copying files\n when the destination file already exists.\n \"\"\"\n\n dest_path = source_path.replace(source_dir.strip('/'), dest_dir.strip('/'))\n\n # Skip if dest file already exists\n if skip_existing and os.path.exists(dest_path):\n return\n\n # Create directory if necessary\n os.makedirs(os.path.dirname(dest_path), exist_ok=True)\n\n copyfile(source_path, dest_path)\n\n p_umap(copy, paths)", "def copy_source_files(self):\n\n LOGGER.info(f'start copying source files')\n count = 0\n for sfp in tqdm(sorted(self.source_fps), disable=self.disable_tqdm):\n try:\n meta = extract_law_meta(sfp)\n nodes = parse_xml_fp(sfp)\n tfp = self.stot(sfp)\n tfp.parent.mkdir(parents=True, exist_ok=True)\n save_law_tree(meta['LawTitle'], nodes, tfp)\n except Exception as e:\n LOGGER.error(f'failed to copy {sfp}: {e}')\n continue\n self.target_fps.add(tfp)\n LOGGER.debug(f'copied {sfp} to {tfp}')\n count += 1\n LOGGER.info(f'copied total {count} source files, now total {len(self.target_fps)} target files exist')" ]
[ "0.7597833", "0.74374366", "0.7421489", "0.74199075", "0.7275083", "0.72525555", "0.7226759", "0.7090539", "0.69888306", "0.6975546", "0.6959424", "0.6957595", "0.69572604", "0.693605", "0.68874115", "0.68684584", "0.6854901", "0.6811629", "0.67662746", "0.6756434", "0.6754417", "0.67535985", "0.6752406", "0.6750187", "0.6716537", "0.6700827", "0.6699915", "0.6697754", "0.6696486", "0.6694921" ]
0.7903018
0
Check if the destination exists.
def __checkDestination(self):
    return os.path.exists(self.__targetPath)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exists(self, destination: Route) -> bool:\n i = hash(destination.addr)\n return i in self.keys()", "def file_copy_remote_exists(self, src, dest=None, file_system=None):\n self.enable()\n if file_system is None:\n file_system = self._get_file_system()\n\n file_copy = self._file_copy_instance(src, dest, file_system=file_system)\n if file_copy.check_file_exists() and file_copy.compare_md5():\n log.debug(\"Host %s: File %s already exists on remote.\", self.host, src)\n return True\n\n log.debug(\"Host %s: File %s does not already exist on remote.\", self.host, src)\n return False", "def directory_exists(destination):\n\n if not os.path.isdir(destination):\n raise RuntimeError('Directory %s does not exists' % (destination))\n\n return True", "def is_out_dir_present(self, dest_dir):\n if not Path(dest_dir).exists():\n print('No such output directory found:', dest_dir)\n return False\n return True", "def is_destinations_page_loaded_properly(self):\n return self.is_element_present(self.search_destination_locator)", "def is_vendor_destination_present(self):\n return self.is_element_present(self.vendor_destination_locator)", "def _copy_if_not_exists(source: pl.Path, destination: pl.Path) -> None:\n if destination.is_dir():\n destination_file = destination / source.name\n else:\n destination_file = destination\n if not destination_file.exists():\n su.copy(source, destination)", "def _is_valid_dest(self, state, index):\n try:\n return state.is_empty_at_index(index)\n except: # TODO: Catch custom exception\n return False", "def is_broken_link(self):\n if not os.path.exists(self.dst):\n if os.path.lexists(self.dst):\n return True\n return False", "def has_access(self, source, destination, port):\n logger.info('Looking for path from %s to %s on port %s', source, destination, 80)\n self._validate_args(source, destination)\n paths = self.list()\n logger.info('Found paths %s', paths)\n return self._has_access(paths, source, destination, port)", "def exists(self):\r\n return os.path.exists(self.full_path)", "def verify_destination(self, destination):\n # Make sure the text file was copied to the destination.\n text_file = os.path.join(destination, 'notes.txt')\n assert os.path.isfile(text_file)\n with open(text_file) as handle:\n assert handle.read() == \"This file should be included in the backup.\\n\"\n # Make sure the subdirectory was copied to the destination.\n subdirectory = os.path.join(destination, 'subdirectory')\n assert os.path.isdir(subdirectory)\n # Make sure the symbolic link was copied to the destination.\n symlink = os.path.join(subdirectory, 'symbolic-link')\n assert os.path.islink(symlink)", "def check_file_exist(self):\n return False", "def exists(self):\n if self.host.exists(self.remote_path):\n print 'Yes, config exists already.'\n return True\n else:\n print 'Config doesn\\'t exist yet'\n return False", "def target_exists(self, target_id=0):\n try:\n target = self.target(target_id=target_id)\n except Exception as e:\n log.error(\"Exception checking if target exists: {} {}\".format(type(e), e))\n return False\n return target is not None", "def check_exists(self, name):\n if self.pyload.config.get(\"download\", \"skip_existing\"):\n download_folder = self.pyload.config.get(\n 'general', 'download_folder')\n dest_file = fsjoin(download_folder,\n self.pyfile.package().folder if self.pyload.config.get(\n \"general\", \"folder_per_package\") else \"\",\n name)\n if exists(dest_file):\n self.pyfile.name = name\n self.skip(_(\"File exists.\"))", "def exists(self):\n return self.islink() 
or exists(self._path)", "def exists(self):\n log.warning('Could not determine whether %s exists due to unhandled scheme.', self.file_name)", "def is_file_exists(self):\n pass", "def exists(self):\n return os.path.isfile(self.location)", "def Exists(self, path: str) -> bool:\n ...", "def exists(self, url):\n return (self.base_path / url).exists()", "def exists(self, path):", "def exists(self):\n return True", "def exists(self):\n return True", "def _url_exists(self, url):\n return url_exists(url)", "def exists(self):\n return self.path.exists()", "def object_exists(self, fname):\n return False", "def exist(self):\n return self.file_path.exists()", "def exists(self):\n # TODO: What about broken sym-links?\n return os.path.exists(self.path)" ]
[ "0.6858912", "0.6722851", "0.671514", "0.64568853", "0.64418423", "0.6440546", "0.6426673", "0.6364792", "0.63298523", "0.6284588", "0.6248964", "0.6243041", "0.6229809", "0.61921597", "0.61565524", "0.613735", "0.6128297", "0.6117316", "0.60820246", "0.60760295", "0.60641205", "0.60583717", "0.6040897", "0.6035339", "0.6035339", "0.60085994", "0.6005004", "0.5977878", "0.59776205", "0.5952482" ]
0.8561405
0
Set the destination path.
def setDestinationPath(self, targetPath):
    self.__targetPath = targetPath
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dest_path(self, dest_path):\n\n self._dest_path = dest_path", "def set_destination(self, value):\n try:\n self._destination = value\n self._repository = hg.repository(ui.ui(), self._destination)\n except:\n self._repository = None", "def destination(self, destination):\n\n self._destination = destination", "def setDestination(self, node):\n self.dest_node = node", "def save(self, **kwargs):\n if len(self.path) > 0:\n self.path = self.path.strip().rstrip()\n super(TargetPath, self).save(**kwargs)", "def path(self, path):\n self._path = path", "def setPath(*args):", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def setPath(self, path):\n if self._path != path:\n self._path = path\n self.__update_preview()", "def set_path(self, sNewSharePath):\n\t\tcall_sdk_function('PrlShare_SetPath', self.handle, sNewSharePath)", "def _set_path(self):\n self.path = self._get_path()\n self.depth = self.get_depth()\n\n self.save()", "def putPath(self, path, pathname):\n self.paths[path] = pathname", "def SetDestination(self, point):\n\t\tif type(point) != Point:\n\t\t\traise TypeError(\"Incorrect Type\")\n\n\t\tself.destination = point", "def set_path(self, directory):\n self.directory = directory", "def _set_download_dir(self, path):\n assert path, 'Must input a non-empty path.'\n self.manager.download_dir = path", "def do_destination(self, args):\n self.destination = int(args)", "def setPath(self, path):\n libxml2mod.xmlURISetPath(self._o, path)", "def change_dir(self, src: str = None, dest: str = None):\n\n if not is_empty(src):\n self._srcDir = src\n\n if not is_empty(dest):\n self._destDir = dest", "def setSourcePath(self, sourcePath):\n self.__sourcePath = sourcePath", "def set(path):\n rino.remote.set(path)", "def SetPath(self, directory):\r\n\r\n if directory is not None and exists(directory) and isdir(directory):\r\n self.directory = directory", "def destination_config(self, destination_config):\n self._destination_config = destination_config", "def setup_signal_destination(cls, dest):\n cls.signal_destination = dest", "def _set_path(self, address):\n self._fullpath = address\n # http://stackoverflow.com/questions/8384737\n split_path = ntpath.split(address)\n self._filename = split_path[1] or ntpath.basename(split_path[0])", "def __set_full_path_of_file(self, value):\n self.full_path_of_file = value", "def set_path_url(self, data):\n self._path_url = self._uni(data)" ]
[ "0.7895495", "0.74544734", "0.7052994", "0.67628735", "0.6749421", "0.66697735", "0.664493", "0.6620139", "0.6620139", "0.6620139", "0.6620139", "0.6620139", "0.66084516", "0.6494985", "0.6414038", "0.6353641", "0.63524634", "0.63138354", "0.6285127", "0.62720805", "0.62612736", "0.62605566", "0.62146133", "0.6212251", "0.62108123", "0.6209646", "0.6129818", "0.6114137", "0.6101537", "0.60632163" ]
0.8375323
0
Set the source path.
def setSourcePath(self, sourcePath):
    self.__sourcePath = sourcePath
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_source_path(self, folder):\n self.source_path = folder", "def set_source_file(self, source_file):\n self.set_attribute(\"source_file\", source_file)", "def set_source(self, source):\n self.data['source'] = source", "def set_source(self, source_name):\n self.source = source_name", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def svn_client_copy_source_t_path_set(svn_client_copy_source_t_self, char_path): # real signature unknown; restored from __doc__\n pass", "def setPath(*args):", "def setSourcePath(self, offset=0):\n while True:\n tempSourcePath = input(\n offset * \" \" + \"Specify any change to the default source path [%s]: \" % self.sourcePath) or self.sourcePath\n if os.path.exists(tempSourcePath):\n self.sourcePath = tempSourcePath\n break\n else:\n print(\"Path does not exist!\")", "def setPath(self, path):\n if self._path != path:\n self._path = path\n self.__update_preview()", "def source(self, source: Source):\n self._source = source", "def setSourceFile(filename):", "def src(self, src):\n\n self._src = src", "def setDestinationPath(self, targetPath):\n self.__targetPath = targetPath", "def path(self, path):\n self._path = path", "def do_source(self, line):\n\n if self.root_directory:\n self.source_file = self.root_directory + \"/\" + line\n self.do_check_file(self.source_file)\n else:\n self.source_file = line\n self.do_check_file(self.source_file)", "def __set_full_path_of_file(self, value):\n self.full_path_of_file = value", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def path(self, path):\n\n self._path = path", "def src_subpath(self, val: str):\n self[\"src_subpath\"] = val", "def set_flow_source(self, source):\n self._source = source", "def set_scanpath(self, scanpath):\n self.scanpath = scanpath" ]
[ "0.7961063", "0.75596577", "0.7289281", "0.7227081", "0.70959604", "0.70959604", "0.70959604", "0.70959604", "0.70959604", "0.70959604", "0.70959604", "0.7083602", "0.70523936", "0.6990167", "0.694741", "0.6841845", "0.6805303", "0.6765955", "0.6750728", "0.6708304", "0.6708254", "0.6675009", "0.66064113", "0.66064113", "0.66064113", "0.66064113", "0.66064113", "0.6577588", "0.64991003", "0.6439791" ]
0.8587527
0
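Taken together, the four preceding records (__copyFiles, __checkDestination, setDestinationPath, setSourcePath) read like methods of one small copier class, but that class itself is not part of the data. The wrapper below is only a hypothetical sketch of how such methods could fit together — the class name, constructor, and copy() driver are assumptions; only the four method bodies come from the records above.

import os
import shutil

class FileCopier:  # hypothetical name; the enclosing class is not shown in the records
    def __init__(self, sourcePath=None, targetPath=None):
        self.__sourcePath = sourcePath
        self.__targetPath = targetPath

    def setSourcePath(self, sourcePath):
        self.__sourcePath = sourcePath

    def setDestinationPath(self, targetPath):
        self.__targetPath = targetPath

    def __checkDestination(self):
        # True when the target already exists, in which case copying is skipped
        return os.path.exists(self.__targetPath)

    def __copyFiles(self):
        # copytree for a directory source, copy2 (metadata-preserving) for a single file
        if os.path.isdir(self.__sourcePath):
            shutil.copytree(self.__sourcePath, self.__targetPath)
        else:
            shutil.copy2(self.__sourcePath, self.__targetPath)

    def copy(self):
        # assumed driver: only copy when the destination does not exist yet
        if not self.__checkDestination():
            self.__copyFiles()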
Return a hash derived from the call stack filename and location
def caller_hash(depth:int=1, prefix:str='#') -> str:
    caller = getframeinfo(stack()[depth+1][0])
    str = f"{caller.filename}/{caller.lineno}"
    _hash = hash(str)
    _hash += sys.maxsize + 1
    return prefix + hex(_hash)[2:]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_hash(self) -> str:\n if self.call_hash:\n # Derived state from a call_node.\n return hash_struct([\"Handle\", self.fullname, \"call_hash\", self.key, self.call_hash])\n else:\n # Initial state.\n return hash_struct([\"Handle\", self.fullname, \"init\", self.key, self.args, self.kwargs])", "def func_hash(self) -> str:\n\n return self.call_data[:10]", "def caller_info(self):\n\n frames = traceback.extract_stack()\n frames.reverse()\n try:\n (_, mod_name) = __name__.rsplit('.', 1)\n except ValueError:\n mod_name = __name__\n for (fpath, lnum, _, _) in frames:\n (fname, _) = os.path.basename(fpath).rsplit('.', 1)\n if fname != mod_name:\n break\n\n return (fname, lnum)", "def _fingerprint(self):\n hasher = hashlib.md5()\n source = inspect.getsource(self._func)\n hasher.update(source.encode('utf-8'))\n\n return hasher.hexdigest()", "def _actual_hash(self):\n return hash_of_file(join(self._temp_path, self._downloaded_filename()))", "def fetch_local_hashcode(self, path):\n\t\treturn hashlib.sha256(open(self.config[\"daemon\"][\"rootdir\"] + path, \"rb\").read()).hexdigest()", "def hash(self):\n return os.popen('git rev-parse HEAD').read().strip()", "def __hash__(self):\n return hash(self._full_path)", "def calc_info_hash(self):\n return \"infohash\"", "def _get_githash(self) -> str:\n return get_git_revision_hash(self.source_dir)", "def calc_info_hash(self) -> bytes:\n raise NotImplementedError()", "def get_info_hash(self):\n return self.info_hash", "def get_hash():\n return render(build_hash('command'),False)", "def get_hash(self):\r\n return", "def frame_location_info(self):\n\n return str(self.active_frame.f_code.co_filename) + \":\" + str(self.active_frame.f_lineno)", "def get_binary_name():\n return os.path.basename(inspect.stack()[-1][1])[:16]", "def _get_git_hash(self):\n try:\n with open(os.path.join(self._base_dir, '.git', 'HEAD'), 'r') as head_file:\n ref = head_file.read().strip()\n if ref[:5] == 'ref: ':\n with open(os.path.join(self._base_dir, '.git', ref[5:]), 'r') as commit_file:\n return commit_file.read().strip()\n else:\n return ref[5:]\n except Exception as err:\n self._logger.warning('Couldnt read the git commit hash: %s :: %s',\n err.__class__.__name__, err)\n return 'UNKNOWN'", "def _findCaller(stack_info=False):\n f = logging.currentframe()\n #On some versions of IronPython, currentframe() returns None if\n #IronPython isn't run with -X:Frames.\n if f is not None:\n f = f.f_back\n rv = \"(unknown file)\", 0, \"(unknown function)\", None\n while hasattr(f, \"f_code\"):\n co = f.f_code\n filename = os.path.normcase(co.co_filename)\n if filename == logging._srcfile:\n f = f.f_back\n continue\n sinfo = None\n if stack_info:\n sio = io.StringIO()\n sio.write('Stack (most recent call last):\\n')\n traceback.print_stack(f, file=sio)\n sinfo = sio.getvalue()\n if sinfo[-1] == '\\n':\n sinfo = sinfo[:-1]\n sio.close()\n rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)\n break\n return rv", "def get_hash(self, params):\n return self.sha", "def current_hash(self):", "def calc_statistics_hash(self) -> bytes:\n return b\"somehash\"", "def hash(self):\n return self.block_header.hash", "def get_hash(self):\n return self.__hash", "def get_data_hash(args):\n pass", "def get_hash(self) -> str:\n return self.__hash.hexdigest()", "def hash(self):\n return self._obs_file.hash()", "def static_file_hash(filepath):\n hasher = hashlib.md5() # nosec: B303\n\n with contextlib.closing(open(filepath, 'rb')) as file:\n hasher.update(file.read())\n return hasher.hexdigest()", "def 
find_actual_caller(self):\n\n # Gleaned from code in the logging module itself...\n try:\n f = sys._getframe(1)\n ##f = inspect.currentframe(1)\n except Exception:\n f = None\n # On some versions of IronPython, currentframe() returns None if\n # IronPython isn't run with -X:Frames.\n if f is not None:\n f = f.f_back\n rv = \"(unknown module)\", \"(unknown file)\", 0, \"(unknown function)\"\n while hasattr(f, \"f_code\"):\n co = f.f_code\n filename = os.path.normcase(co.co_filename)\n mod = inspect.getmodule(f)\n\n if mod is None:\n modname = '__main__'\n else:\n modname = mod.__name__\n\n if modname == __name__:\n # Crawl back until the first frame outside of this module\n f = f.f_back\n continue\n\n rv = (modname, filename, f.f_lineno, co.co_name)\n break\n return rv", "def get_hash(self):\n source = \"\"\n for cell in self.original_cells:\n source += \"\\n\".join(get_source(cell))\n return hashlib.sha256(source.encode(\"utf-8\")).hexdigest()", "def current_git_hash():\n git_file = \".git/refs/heads/master\"\n git_path = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),\n os.pardir, os.pardir, git_file))\n\n if not os.path.exists(git_path):\n git_path = os.getcwd() + \"/\" + git_file\n if not os.path.exists(git_path):\n git_path = os.getcwd() + \"/../\" + git_file\n if not os.path.exists(git_path):\n return None\n\n with open(git_path, \"r\") as git:\n git_hash = git.read()\n\n return git_hash[0:5]" ]
[ "0.7092891", "0.7029913", "0.6900932", "0.6653355", "0.6600427", "0.6556791", "0.653537", "0.6502056", "0.64829963", "0.64236236", "0.63612115", "0.6338636", "0.630916", "0.6291573", "0.6283543", "0.6251896", "0.62413055", "0.6219128", "0.6216642", "0.61987674", "0.61952037", "0.6191258", "0.61893827", "0.61850655", "0.6178537", "0.61727136", "0.6153", "0.6149852", "0.61416256", "0.6131779" ]
0.79158044
0
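The caller_hash record above uses getframeinfo, stack, and sys.maxsize without showing its imports, so it does not run on its own, and it rebinds the built-in name str. A minimal self-contained variant is sketched below; the imports, the renamed local, and the some_caller example are assumptions added for illustration.

import sys
from inspect import getframeinfo, stack

def caller_hash(depth: int = 1, prefix: str = '#') -> str:
    # stack()[depth + 1] is the frame above the function that invoked caller_hash
    caller = getframeinfo(stack()[depth + 1][0])
    location = f"{caller.filename}/{caller.lineno}"  # renamed to avoid shadowing str()
    _hash = hash(location)
    _hash += sys.maxsize + 1  # shift the (possibly negative) hash into the non-negative range
    return prefix + hex(_hash)[2:]

def some_caller():
    # hypothetical helper: prints a tag derived from the location that called some_caller()
    print(caller_hash())

if __name__ == "__main__":
    some_caller()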
Returns n! (n factorial)
>>> fatorial(1)
1
>>> fatorial(5)
120
>>> fatorial(30)
265252859812191058636308480000000L
def fatorial(n):
    if n <= 1:
        return 1
    return n * fatorial(n-1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def deconvolve(num, den, n=None):\n num = np.atleast_1d(num)\n den = np.atleast_1d(den)\n N = len(num)\n D = len(den)\n if D > N and n is None:\n quot = []\n rem = num\n else:\n if n is None:\n n = N - D + 1\n input = np.zeros(n, float)\n input[0] = 1\n quot = signal.lfilter(num, den, input)\n num_approx = signal.convolve(den, quot, mode=\"full\")\n if len(num) < len(num_approx): # 1d only ?\n num = np.concatenate((num, np.zeros(len(num_approx) - len(num))))\n rem = num - num_approx\n return quot, rem", "def dirichlet_conv(f, g):\n @natural_input\n def fg(n):\n return sum([f(d) * g(n//d) for d in divisors(n)])\n return fg", "def cdilate(f, g, b=None, n=1):\n\n if b is None: b = secross()\n y = intersec(f,g)\n for i in xrange(n):\n aux = y\n y = intersec(dilate(y,b),g)\n if isequal(y,aux): break\n return y", "def fakultet (n = 1):\n sum = 1\n for i in range(n, 1, -1):\n sum *= i\n return sum", "def d(n):\n return sum(divisors(n))", "def nth_derivative(f, x, n):\n h = 10e-2\n out_h = 1/(h**n)\n out = 0\n for k in range(0, n+1):\n out += (-1)**(k+n)*choose(n,k)*f(x +k*h)\n return out_h*out", "def d(n):\n divisors = []\n for i in range(1, n):\n if n % i == 0:\n divisors.append(i)\n return sum(divisors)", "def fact_i(n):\n \n result = 1\n while n > 1:\n result *= n\n n -= 1\n return result", "def d(n):\n rt = math.sqrt(n)\n i = 2\n result = 1\n while i < rt:\n if n % i == 0:\n result += i\n result += n // i\n i += 1\n\n # i == rt implies that n is a square number\n if i == rt and n % i == 0:\n result += i\n return result", "def sol(n):\n p = 1\n res = 0\n \n while n:\n p*=5\n if n&1:\n res+=p\n n=n>>1\n return res%1000000007", "def d(n):\n if n not in d_memo:\n # Start with 1 so n isn't counted\n total = 1\n # Loop from 2 to sqrt(n)\n for i in xrange(2, int(n**0.5) + 1):\n if n % i == 0:\n total += i\n # Only add the other divisor if it isn't a square\n if i * i != n:\n total += n/i\n\n d_memo[n] = total\n\n return d_memo[n]", "def facti(n):\n if n == 0: return 1\n f= 1\n for i in range(2,n):\n f= f*i\n return f", "def fact(n):\n answer = 1\n while n > 1:\n answer *= n\n n -= 1\n return answer", "def getDivisors(n):", "def facti(n: int) -> int:\n if n == 0:\n return 1\n f = 1\n for i in range(2, n):\n f = f*i\n return f", "def evolve(Z, T, n=1):\r\n Z = closure(Z)\r\n\r\n for i in range(n):\r\n prob_T = prob(Z, T)\r\n if prob_T != 0.0:\r\n Z = np.matmul(Z, T)/prob_T\r\n else:\r\n Z = closure([1]*len(Z))\r\n Z = np.matmul(Z, T)/prob(Z, T)\r\n return Z", "def d(i):\n if i==0:\n return 0\n elif (i%2)==0:\n return g(i-1) % N\n else:\n return g(i) % N", "def apply(self, f_del, h):\n\n fd_rule = self.rule\n\n ne = h.shape[0]\n nr = fd_rule.size - 1\n _assert(nr < ne, 'num_steps ({0:d}) must be larger than '\n '({1:d}) n + order - 1 = {2:d} + {3:d} -1'\n ' ({4:s})'.format(ne, nr+1, self.n, self.order, self.method))\n f_diff = convolve(f_del, fd_rule[::-1], axis=0, origin=nr // 2)\n\n der_init = f_diff / (h ** self.n)\n ne = max(ne - nr, 1)\n return der_init[:ne], h[:ne]", "def fn(n):\n if n == 0: return 1\n return sum(fn(i)*fn(n-i-1) for i in range(n))", "def factorial(n):\n return reduce(mul, range(1, n), 1)", "def square_difference(n):\n\n return n*(n+1)*(3*n+2)*(n-1)/12", "def delta_n(n, zeros):\n #return log(zeros[n]/2.0/pi/e)/2.0/pi*(zeros[n+1]-zeros[n])\n return log(zeros[n]/2.0/pi)/2.0/pi*(zeros[n+1]-zeros[n])", "def factorial(n):\n\tf = 1\n\tfor i in range(1,n+1):\n\t\tf = f*i\n\n\treturn f", "def factorial_loop(n):\n\n pass # @todo -fix this", "def ne(n):\n return 4*n*n - 2*n + 1", "def 
fact_while1(n: int) -> int:\n ret = 1\n if n == 0:\n return 1\n while True:\n if n == 1:\n return ret\n n, ret = n - 1, ret * n", "def factorial(n):\n if n == 0:\n return 1\n else:\n return reduce((lambda x, y: x * y), range(1, n + 1))", "def sw(n):\n return 4*n*n + 2*n + 1", "def euler_sde(self, x, rv_n):\n n = self.mp.params[0]; k = self.mp.params[1];\n gamma = self.mp.params[2]; dt = self.mp.params[3];\n\n if x.get_shape()[1] > 1:\n evolve_fun = self.evolve_system\n else:\n evolve_fun = self.evolve\n\n dx = dt * self.evolve(x, n, k, gamma)\n x = x + dx + tf.sqrt(dt)*x*rv_n\n return tf.cast(x, tf.float32)", "def cycles_direct(n, m):\n if n <= 2:\n return 1\n two_exp = pow(3, n - 2)\n two = pow(2, two_exp, m)\n three_exp = 3 * (pow(3, n - 3) - 1) // 2\n three = pow(3, three_exp, m)\n return (two * three) % m" ]
[ "0.6400818", "0.590391", "0.58923686", "0.5881932", "0.5868089", "0.57699984", "0.57487494", "0.5739737", "0.57129353", "0.5664286", "0.56454784", "0.56116253", "0.55943865", "0.5588568", "0.5585924", "0.55539", "0.55361503", "0.55334914", "0.55174816", "0.5490245", "0.54787153", "0.54680353", "0.54644984", "0.54627067", "0.5426773", "0.542648", "0.54155654", "0.54068446", "0.53802365", "0.536901" ]
0.6433339
0
Greets the user with the parameter as the phrase
def greeter():
    name = input("What is your name? ")
    print("\nMarvin says:\n")
    print("Hello %s - your awesomeness!" % name)
    print("What can I do you for?!")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def greet_player(name):\n\t\n\tprint \"How are are you doing %s?\" % name", "def greet_user(username):\n print(\"Hello, \" + username + \"!\")", "def greet_user(username):\r\n print(\"Hello, \" + username + \"!\")", "def greet_user(username):\n print(\"Hello, \" + username.title() + \"!\")", "def greet_user(username):\n print(\"Hello, \" + username.title() + \"!\")", "def greet_user():\r\n print('Hi,' + FirstName + ' ' + LastName + ' thanks for joining us inside the beer app!')", "def greet_user(name = 'Menoetius'):\n print(\"Hello, my name is \" + name.title() + \"!\")", "def greet_username(username):\n print(\"Hello, \" + username.title() + \"!\")", "def greet_user(username):\r\n print(f\"Hello, {username.title()}!\")", "def greet(name, msg='Good Morning !'):\n print(\"Hello\", name , \".\", msg)", "def greeting_message(name):\n print('My name is ' + name)", "def greet_user():\r\n print(\"hello!\")", "def greeter_user(username):\n print(f\"Hello {username}\")", "def greet_user():\n print(\"Hello\")", "async def say(self, ctx, *args):\n if not args:\n await ctx.send('did you want me to say something?')\n return\n message = ' '.join(args)\n message = profanity_filter(message)\n await ctx.send(message)", "def greet_user():\n print(\"Hello!\")", "def greet_user():\n print(\"Hello!\")", "def greet_user():\n print(\"Hello!\")", "def greet_person():\n\n player = request.form.get(\"person\")\n\n # compliment = choice(AWESOMENESS)\n compliment = request.form.getlist(\"compliment\")\n # returns list so would need another function to loop over list to pull in\n # multiple choices, perhaps to create a string to then pass through as\n # compliment, etc.\n\n return \"\"\"\n <!doctype html>\n <html>\n <head>\n <title>A Compliment</title>\n </head>\n <body>\n Hi, {player}! I think you're a {compliment}!\n </body>\n </html>\n \"\"\".format(player=player, compliment=compliment)", "def speak(message):\n print(message)", "async def say(self, ctx):\n\n await ctx.message.delete()\n if len(ctx.message.content.split()) < 2:\n return await ctx.send('You must inform all parameters!')\n\n msg = ctx.message.content.split('!say', 1)\n await ctx.send(msg[1])", "def greeting(players_name):\n print \"\\nGreat! Welcome, \" + players_name + \". The purpose of this game is to fill in the blanks for all the sentences provided.\"", "async def phrase(self, ctx):\n await self.heleus.send_command_help(ctx)", "def speak(text):\r\n engine.say(text)\r\n engine.runAndWait()\r\n print(text)", "def greet_person():\n\n player = request.args.get(\"person\")\n\n compliment = choice(AWESOMENESS)\n\n return render_template(\"compliment.html\",\n person=player,\n compliment=compliment)", "def greet_person():\n\n player = request.args.get(\"person\")\n\n compliment = choice(AWESOMENESS)\n\n return render_template(\"compliment.html\",\n person=player,\n compliment=compliment)", "async def hi(self, ctx, *, extra=\"\"):\n if str(ctx.author) == \"East#4048\" and extra.startswith(\"there...\"):\n async with ctx.typing():\n await asyncio.sleep(1)\n await ctx.send(\"Hello East.\")\n await asyncio.sleep(1)\n async with ctx.typing():\n await asyncio.sleep(2)\n await ctx.send(\"Thank you. 
The same to you.\")\n return\n await ctx.send(f\"Hello there {ctx.author.name}\")", "def speak(self, what):\n if isinstance(what, str):\n return self.whatever()\n\n what = self.clean(what)\n if not what or what == '':\n return self.silence()\n if what.isupper():\n return self.shouting()\n if what.endswith('?'):\n return self.asking()\n return self.whatever()", "def greet_guest():\n print('Welcome')", "def greet_user(self):\n print(\"Hello \" + self.f_name.title() + \"!, hope you're well today!\")" ]
[ "0.714194", "0.70204103", "0.7017552", "0.697279", "0.697279", "0.6967602", "0.6929661", "0.6916902", "0.6748197", "0.6707344", "0.6688582", "0.66677773", "0.6619989", "0.66091704", "0.6571961", "0.65656203", "0.65656203", "0.65656203", "0.6558656", "0.65458745", "0.6540771", "0.65239537", "0.6490181", "0.6487193", "0.6474868", "0.6474868", "0.64744246", "0.6473803", "0.6444556", "0.6441103" ]
0.7055033
1
Multiplies number with word
def multiply(number, word):
    return int(number) * word
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _add_multiplier_words(number, word, language_data):\n required_part = word.split(\"<\")[-1]\n if required_part[0] != \" \":\n return\n\n power_of_10 = _count_zero(number)\n if \"$\" in required_part:\n valid_words = re.findall(CAPTURE_BRACKET_CONTENT, required_part)\n for valid_word in valid_words:\n power_of_10_num = pow(10, power_of_10)\n if power_of_10 >= 2:\n language_data[\"BIG_POWERS_OF_TEN\"][valid_word] = power_of_10_num\n return\n if \"[\" in required_part:\n valid_word = required_part.split(\"[\")[0].strip()\n else:\n valid_word = required_part.split(\">\")[0].strip()\n\n power_of_10_num = pow(10, power_of_10)\n if power_of_10 >= 2:\n language_data[\"BIG_POWERS_OF_TEN\"][valid_word] = power_of_10_num", "def multiply_string(message, n):\r\n return message*n", "def multiply_numbers(first_number, second_number):", "def multiply(*args):\n\n # TODO: Fill sum with the correct value, based on the\n # args provided.\n multiplier = str(args[0] * args[1])\n return multiplier", "def multiplication(numb1, numb2):\r\n return f\"Your result: {numb1*numb2}\"", "def multiply(*args):\n\n result = int(args[0]) * int(args[1])\n\n return str(result)", "def mult(self, first, second):\n try:\n if isinstance(second, str):\n second = self._variables[second]\n first_num = self._variables[first]\n self._variables[first] = int(first_num * second)\n except:\n print(f\"Could not multiply {first} * {second}\")", "def multiply(self, num1, num2):\n if num1 == '0' or num2 == '0':\n return '0'\n\n num = [0] * (len(num1) + len(num2))\n for i, x2 in enumerate(reversed(num2)):\n for j, x1 in enumerate(reversed(num1)):\n num[i+j] += int(x1) * int(x2)\n num[i+j+1] += num[i+j] / 10\n num[i+j] %= 10\n for i in xrange(len(num)-1):\n num[i+1] += num[i] / 10\n num[i] %= 10\n while len(num) > 1 and not num[-1]:\n num.pop()\n return ''.join(map(str, num[::-1]))", "def inner_echo(word1):\n echo_word = word1 * n\n return echo_word", "def inner_echo(word1):\n echo_word = word1 * n\n return echo_word", "def multiplier(self) -> global___Expression:", "def multiplication_total_of(num_list):", "def multiply(num):\n print(int(num * 4))", "def numerize():\n pass", "def math_mult():\n a = int(request.args.get(\"a\"))\n b = int(request.args.get(\"b\"))\n return str(mult(a, b))", "def multiply(numbers):\n \n result = numbers[0]\n for n in numbers[1:]:\n result = n * result\n return result", "def multiply(numbers):\n prod = 1\n for i in numbers:\n prod = prod*i\n return prod", "def multiplication(number1, number2):\n return number1 * number2", "def multiplication(number1, number2):\n return number1 * number2", "def multiply(value, multiplier):\n return value*multiplier", "async def multiply(message, number1: ParamType.NUMBER, number2: ParamType.NUMBER):\n prod = number1 * number2\n return \"product = \" + str(prod)", "def mult(value, arg):\n return int(value)*int(arg)", "def multiply(num1, num2):\n return num1 * num2", "def multiply(num1, num2):\n return num1 * num2", "def multiply_nums(n1, n2):\n\n result = n1 * n2\n return result", "def multiply(num1, num2):\n return num1 * num2", "def multiplication(num1, num2):\n product = num1 * num2\n return product", "def mul(a, b):\n c = Calculator()\n result = c.mul(a, b)\n click.echo('{} * {} = {}'.format(a, b, result))", "def multiplyScore(self, multiplier):\n self.__score *= 1 + ( multiplier / 10 )\n self.__score = int(self.__score)", "def multiply(num1, num2):\n product = num1 * num2\n return product" ]
[ "0.7133484", "0.70913893", "0.6875895", "0.669992", "0.66984", "0.65846294", "0.6558947", "0.6519726", "0.6497288", "0.6497288", "0.63721395", "0.6366141", "0.6305392", "0.62679416", "0.62597495", "0.62541693", "0.6243564", "0.6230531", "0.6230531", "0.62106043", "0.61858195", "0.618578", "0.61646765", "0.61646765", "0.6126035", "0.61258096", "0.61212075", "0.6108944", "0.6103464", "0.6103415" ]
0.86316043
0
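Because int(number) * word uses Python's sequence-repetition semantics, the multiply record above returns the word repeated, not an arithmetic product. A small hedged check of that behaviour (the argument values below are made up for illustration):

def multiply(number, word):
    # str * int repeats the string, so "3" and "ab" give "ababab"
    return int(number) * word

assert multiply("3", "ab") == "ababab"
assert multiply(2, "hi-") == "hi-hi-"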
Compares input number with the latest number
def compare_numbers():
    compareOld = None
    while True:
        compareInput = input("Please provide a number. Quit with 'done'")
        if compareInput == 'done':
            break
        try:
            compareNumber = int(compareInput)
            if compareOld is None:
                print("This is your first number")
            elif compareNumber == compareOld:
                print("Your number is the same as before!")
            elif compareNumber < compareOld:
                print("Your number is less than before!")
            elif compareNumber > compareOld:
                print("Your number is bigger than before!")
            compareOld = compareNumber
        except ValueError:
            print("You need to provide a number")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_smallest_number_with_last_digit_equal_to_an_input_digit():\n assert smallest_number_with_last_digit_equal_to_an_input_digit([1, 6, 34, 68, 40, 48, 20], 8) == 48\n assert smallest_number_with_last_digit_equal_to_an_input_digit([1, 2, 3], 3) == 3\n assert smallest_number_with_last_digit_equal_to_an_input_digit([101, 1001, 100001], 1) == 101", "def test_number_compare(self):\n self.assertEqual(functions.number_compare(1, 1), \"Numbers are equal\")\n self.assertEqual(functions.number_compare(1, 0), \"First is greater\")\n self.assertEqual(functions.number_compare(2, 4), \"Second is greater\")", "def evaluate_number(number : int)->int:\n if type(number) == int and number >1 and number < 100:\n num = total_numbers = porc = 0\n while porc < number:\n num = num + 1\n clasificate = is_bouncy(str(num))\n result = evaluate(clasificate , num)\n if result:\n total_numbers = total_numbers + 1\n porc = total_numbers * 100 / num\n return num\n return 0", "def compare(self) -> int:", "def demo_a_number():\n random_number=randint(0,100)\n number=randint(0,100)\n print (random_number)\n print (number)\n if number == random_number:\n print('correct number')\n while number!=random_number:\n if number >random_number:\n print('number too high')\n number=randint(0,number)\n print(number)\n else:\n print('number too low')\n number=randint(number,100)\n print(number)\n print ('correct number: ')\n print(number)", "def problem_52():\n\n for number in xrange(1, 123456789):\n sorted_num = ''.join(sorted(str(number)))\n if len([value for value in xrange(2, 7)\n if ''.join(sorted(str((value * number)))) == sorted_num]) == 5:\n return number", "def compare(self, value: int, /) -> None:", "def equal_pos_numbers(self):\n lst = [98, 98, 98]\n self.assertEqual(max_integer(lst), 98)", "def check_digit(tracking_number):\n check_digit = 10 - ((sum(itertools.starmap(operator.mul, zip(itertools.cycle((3, 1)), map(int, str(tracking_number))))) + 1) % 10)\n if check_digit == 10:\n check_digit = 0\n return check_digit", "def compare(number_to_guess, guess):\n if guess > number_to_guess:\n print('Too high. \\n Guess again')\n elif guess < number_to_guess:\n print('Too low. \\n Guess again')\n else:\n print(f'You got it! 
The answer was {number_to_guess}')", "def check_number(number):\n digits = str(number)\n if len(digits) != 6:\n return False\n\n double = False\n last = '0'\n for digit in digits:\n if digit < last:\n return False\n\n if digit == last:\n double = True\n\n last = digit\n\n return double", "def find_greatest_number(incoming_list):\n #magiclownumber= none\n #retval= magiclownumber\n #for value in incoming_list:\n #if not retval:\n #retval = value\n # if value> retvale\n #retval= value\n #return retval\n greatest_number = max(incoming_list)\n return greatest_number", "def meets_criteria2(num):\n output = True\n if not exactly_two_same_digits(num):\n output = False\n if not digits_increase(num):\n output = False\n return output", "def guess_a_number():\n\n # TODO:\n # generate a random number (uniformly distributed between 0 and 100)\n # read input from the user and validate that the input is numeric (use the function check_raw)\n # check whether the number was guessed \n # implement the functions evaluate_my_number, which checks whether the number is too high or too low\n # and print this information to the user\n # let the computer guess, therefore implement the demo_a_number function\n random_number=randint(0,100)\n \n '''versuche=0\n max_versuche=5\n guess=-1\n test= False\n while guess != random_number:\n while test == False:\n guess= input('Gib eine Zahl zwischen 0 und 100 ein: ')\n try:\n guess= int(guess)\n test=True\n except ValueError:\n print('Try Again')\n \n if guess == random_number:\n print('Du hast die Zahl erraten!')\n elif guess > random_number:\n print('Die Zahl ist zu gross')\n versuche=versuche+1\n else:\n print('Die Zahl ist zu klein')\n versuche=versuche+1'''", "def cmp_numcite( a, b ) :\n return cmp( int(b['Z9']), int(a['Z9']) )", "def test_same(self):\n same_int = [22, 22, 22, 22]\n self.assertEqual(max_integer(same_int), 22)", "def solution(N):\n\tresult = 0\n\tcounter = 0\n\n\tbin_number = \"{0:b}\".format(N)\n\n\tfor num in bin_number:\n\t\tif num == \"1\":\n\t\t\tif counter > result:\n\t\t\t\tresult = counter\n\t\t\tcounter = 0\n\t\telse:\n\t\t\tcounter += 1\n\n\treturn result;", "def evaluate_my_number(guess, random_number):\n if guess < random_number:\n print('Too low!')\n else: \n print ('Too high!')\n guess = check_raw()\n return guess", "def test03_comparison_operators(self):\n\n import _cppyy\n number = _cppyy.gbl.number\n\n assert (number(20) > number(10)) == True\n assert (number(20) < number(10)) == False\n assert (number(20) >= number(20)) == True\n assert (number(20) <= number(10)) == False\n assert (number(20) != number(10)) == True\n assert (number(20) == number(10)) == False", "def find_max_numb(x,y):\n if x > y:\n print(x, \" - is max number.\")\n return x \n else:\n print(y, \" - is max number.\")\n return y", "def compare(self, a: long, b: long) -> int:\n ...", "def solve_ok(number: int) -> int:\n return no_ones(number) % 2", "def evaluate_my_number(guess, random_number):", "def get_max(num_one, num_two):\n temp_a = int(str(num_one) + str(num_two))\n temp_b = int(str(num_two) + str(num_one))\n if temp_a >= temp_b:\n return num_one\n else:\n return num_two", "def solution():\n i = 1\n\n while True:\n if (\n sorted(str(i))\n == sorted(str(2 * i))\n == sorted(str(3 * i))\n == sorted(str(4 * i))\n == sorted(str(5 * i))\n == sorted(str(6 * i))\n ):\n return i\n\n i += 1", "def check_mountain_number(n):\n def helper(x, is_incresing):\n if x // 10 == 0:\n return True\n if is_incresing and (x % 10) < ((x // 10) % 10):\n return helper(x // 10, 
is_incresing)\n return (x % 10) > ((x // 10) % 10) and helper(x // 10, False)\n return helper(n, True)", "def order(num1, num2, num3):\n num123 = int(num1+num2+num3)\n num132 = int(num1+num3+num2)\n num213 = int(num2+num1+num3)\n num231 = int(num2+num3+num1)\n num312 = int(num3+num1+num2)\n num321 = int(num3+num2+num1)\n if num123 >= num132 and num123 >= num213 and num123 >= num231\\\n and num123 >= num312 and num123 >= num321:\n print(num123)\n elif num132 >= num123 and num132 >= num213 and num132 >= num231\\\n and num132 >= num312 and num132 >= num321:\n print(num132)\n elif num213 >= num132 and num213 >= num123 and num213 >= num231\\\n and num213 >= num312 and num213 >= num321:\n print(num213)\n elif num231 >= num132 and num231 >= num213 and num231 >= num123\\\n and num231 >= num312 and num231 >= num321:\n print(num231)\n elif num312 >= num132 and num312 >= num213 and num312 >= num231\\\n and num312 >= num123 and num312 >= num321:\n print(num312)\n elif num321 >= num132 and num321 >= num213 and num321 >= num231\\\n and num321 >= num312 and num321 >= num123:\n print(num321)\n elif num1 == \"0\" and num2 == \"0\" and num3 == \"0\":\n print(\"0\")", "def _compare(self, value, target):\n result = getattr(self.reg, target) - value\n self.reg.N = result >> 7\n self.reg.C = getattr(self.reg, target) >= value\n self.reg.Z = result == 0", "def check_number(self):\n digits = self.number\n _sum = 0\n alt = False\n ix = []\n for x in str(digits):\n ix.append(int(x))\n for d in reversed(ix):\n assert 0 <= d <= 9\n if alt:\n d *= 2\n if d > 9:\n d -= 9\n _sum += d\n alt = not alt\n return (_sum % 10) == 0", "def test_case03(self):\n version1 = versions.get_version_power(\"1.1.1\")\n version2 = versions.get_version_power(\"0.2.1\")\n self.assertGreater(version1, version2)" ]
[ "0.6480286", "0.6474917", "0.6066898", "0.60084206", "0.58431", "0.58227915", "0.58031726", "0.5782954", "0.5781479", "0.5774768", "0.57605845", "0.57545424", "0.57125413", "0.5712499", "0.5703884", "0.5632079", "0.56003505", "0.5598331", "0.5588869", "0.5585761", "0.5567629", "0.55597353", "0.5544031", "0.55423474", "0.55409074", "0.552856", "0.55165255", "0.5511736", "0.5508311", "0.5501007" ]
0.6772036
0
Modifies an input string
def modify_string():
    modString = input("Please write a string. ")
    modNewStr = ""
    modCount = 1
    for letter in modString:
        if modCount < 2:
            modNewStr = letter
        else:
            modNewStr = modNewStr + "-" + letter * modCount
        modCount += 1
    print("New string: ", modNewStr)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_text(str_in):\n str_out = get_model().replace(str_in)\n get_model().inc_call_counter()\n return str_out", "def mutStr(st):\n\tl = len(st)\n\tci = randomInt(0, l - 1)\n\tcv = st[ci]\n\tif cv.isdigit():\n\t\tr = selectRandomFromList(dig)\n\telif cv.isupper():\n\t\tr = selectRandomFromList(ucc)\n\telse:\n\t\tr = selectRandomFromList(lcc)\n\t\n\tnst = st[:ci] + r + st[ci+1:] if l > 1 else r\n\treturn nst", "def set_raw_string(self, string, length):\n if len(string) != length:\n raise ValueError('Length of passed string does not match length')\n self.originstring = string\n self.stringlength = length", "def mirror_string(the_string):\r\n return the_string + reverse_string(the_string)", "def setInputString(self, inputString):\n assert isinstance(inputString, basestring), \\\n \"Invalid template string!\"\n\n self.__inputString = inputString", "def changenonetostr(s,text='None'):\r\n if not s:\r\n return text\r\n else:\r\n return s", "def test_string_inplace_update(self):\r\n vm = String.value_manager(None, None, 'str')\r\n assert not vm.changed\r\n vm.value += 's'\r\n assert vm.changed", "def get_processed_string(self, input_string):\n if input_string[:6] == '[sic]\"':\n return input_string[6: -1]\n else:\n return input_string.format(**self)", "def string_rotate() -> str:\n\n\tdef input_str() -> str:\n\t\t\"\"\" This function make input of string data\"\"\"\n\t\tinput_string = str(input('Enter your string: '))\n\t\treturn input_string\n\n\tdef input_len() -> int:\n\t\t\"\"\" This function make input of length rotation string\"\"\"\n\t\tinput_length = int(input('Enter your length rotation: '))\n\t\treturn input_length\n\n\tinput_string = input_str()\n\tinput_length = input_len()\n\t\n\tchange_str = ''\n\t\n\tif input_length > 0:\n\t\tchange_str = input_string[input_length:len(input_string)] + input_string[0:input_length]\n\telif input_length < 0:\n\t\tchange_str = input_string[input_length:] + input_string[:input_length]\n\telse:\n\t\tprint(\"Intput length = 0\")\n\n\treturn print(change_str)", "def store_string(self, string: str) -> None:", "def replace_in_string(string, length, substring, idx):\n return string[:idx]+substring+string[idx+length:]", "def updateString(olddata,newdata,concater):\r\n\r\n if olddata==\"\":\r\n return str(newdata)\r\n else:\r\n return str(olddata + concater + newdata)", "def input_str() -> str:\n\t\tinput_string = str(input('Enter your string: '))\n\t\treturn input_string", "def replace(self, string: str) -> None:\n self.buffer.replace(self.index, string)", "def input_str() -> str:\n\tinput_string = str(input('Enter your string: '))\n\treturn input_string", "def process(self, s: str) -> str:\n raise NotImplementedError(\"must be implemented by subclasses\")", "def transform_string(self, string):\n return self._transformString(string)", "def apply_to(self, string: str) -> str:\n return self.searchPattern.sub(self.sub_func, string)", "def update(self, str):\n if self._verbose:\n return\n\n # Print the necessary number of backspaces to erase the previous message.\n self._stream.write(\"\\b\" * len(self._last_update))\n self._stream.write(str)\n num_remaining = len(self._last_update) - len(str)\n if num_remaining > 0:\n self._stream.write(\" \" * num_remaining + \"\\b\" * num_remaining)\n self._last_update = str\n self._dirty = True", "def replace(self, old, new) -> String:\n pass", "def replace(self, string):\n # self.values is assigned in mix_iterator()\n for k, v in self.job.items():\n string = string.replace(k, v)\n return string", "def myreplace(old, 
new, s):\r\n if old.isspace(): # If a weird guy set \"old\" only have space(s)\r\n old = None\r\n return new.join(s.split(old))", "def myreplace(old, new, s):\n\n s = s.split()\n s = \" \".join(s)\n\n return new.join(s.split(old))", "def replace(content,line,word,string):\n\tline = int(line)\n\tstring = string.strip().replace(\" \",\" &^&%+\").split()\n\tif len(content)<int(line):\n\t\tprint \"Error: No such line.\"\n\t\treturn (content,False)\n\tif word is not None:\n\t\tword = int(word)\n\t\tif len(content[line-1])<int(word) or line>len(content):\n\t\t\tprint \"Error: No such word.\"\n\t\t\treturn (content,False)\n\t\tword = word-1\n\t\tcontent[line-1] = content[line-1][:word]+string+content[line-1][word+1:]\n\t\t\n\telse:\n\t\tline = line-1\n\t\tcontent[line] = string\n\treturn (content,True)", "def replace_one(opstr: str, old: str, new: str) -> str:\n count = opstr.count(old)\n if count != 1:\n raise Exception(\n f'expected 1 string occurrence; found {count}. String = {old}')\n return opstr.replace(old, new)", "def set_string(string, hash):\r\n # Pad out string with 3 nulls\r\n string = string + ([NULL_STRING] * 3)\r\n\r\n # If the string now longer than STRING_LENGTH, cut it shorter\r\n if len(string) > STRING_LENGTH:\r\n string = string[:STRING_LENGTH]\r\n\r\n # If the string is still too short, pad out with the hash\r\n if len(string) < STRING_LENGTH:\r\n string = string + hash[len(string) : STRING_LENGTH]\r\n\r\n return string", "def amend_str(text):\n text = text.lower()\n text = re.sub(r\"[^A-Za-z0-9^,!.\\/'+-=]\", \" \", text)\n text = re.sub(r\"what's\", \"what is \", text)\n text = re.sub(r\"that's\", \"that is \", text)\n text = re.sub(r\"there's\", \"there is \", text)\n text = re.sub(r\"it's\", \"it is \", text)\n text = re.sub(r\"\\'s\", \" \", text)\n text = re.sub(r\"\\'ve\", \" have \", text)\n text = re.sub(r\"can't\", \"can not \", text)\n text = re.sub(r\"n't\", \" not \", text)\n text = re.sub(r\"i'm\", \"i am \", text)\n text = re.sub(r\"\\'re\", \" are \", text)\n text = re.sub(r\"\\'d\", \" would \", text)\n text = re.sub(r\"\\'ll\", \" will \", text)\n text = re.sub(r\",\", \" \", text)\n text = re.sub(r\"\\.\", \" \", text)\n text = re.sub(r\"!\", \" ! \", text)\n text = re.sub(r\"\\/\", \" \", text)\n text = re.sub(r\"\\^\", \" ^ \", text)\n text = re.sub(r\"\\+\", \" + \", text)\n text = re.sub(r\"\\-\", \" - \", text)\n text = re.sub(r\"\\=\", \" = \", text)\n text = re.sub(r\"'\", \" \", text)\n text = re.sub(r\"(\\d+)(k)\", r\"\\g<1>000\", text)\n text = re.sub(r\":\", \" : \", text)\n text = re.sub(r\" e g \", \" eg \", text)\n text = re.sub(r\" b g \", \" bg \", text)\n text = re.sub(r\" u s \", \" american \", text)\n text = re.sub(r\"\\0s\", \"0\", text)\n text = re.sub(r\" 9 11 \", \"911\", text)\n text = re.sub(r\"e - mail\", \"email\", text)\n text = re.sub(r\"j k\", \"jk\", text)\n text = re.sub(r\"\\s{2,}\", \" \", text)\n\n return text.strip()", "def update(name):\n strRet = mapping(name)\n return strRet", "def stringSmuMod(self, arg):\n\t\targ[0] = \"'\" + arg[0] + \"'\"\n\t\targ[2] = \"'\" + arg[2] + \"'\"\n\t\treturn arg", "def get_input_string():\n return input(\"Enter input string: \")" ]
[ "0.6333922", "0.5938327", "0.59197223", "0.59008723", "0.5891288", "0.5889071", "0.5888334", "0.58578706", "0.58334684", "0.58226764", "0.5804948", "0.5804034", "0.5773653", "0.57354724", "0.5726661", "0.5689621", "0.5688094", "0.56491864", "0.56426936", "0.55888075", "0.5581347", "0.55767107", "0.55580527", "0.5550767", "0.55417025", "0.5514233", "0.5508535", "0.5493239", "0.54898304", "0.54705095" ]
0.6682068
0
Picks the upper case letters and concatenates them into a new string
def akronom():
    akro_str = input("Write a sentance with both upper case and lower case letters. ")
    akronomed_str = ""
    for letter in akro_str:
        if str.isupper(letter):
            akronomed_str += letter
    print(akronomed_str)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def upper(string):\n new_string = '' # Empty string to append to\n for char in string: # Itterate over each character in user's string\n if char.isalpha() and not char.isupper(): # If the character is an alphabet and not already uppercase\n char = (chr(ord(char) - 32)) # Subtract 32 from it's ASCI value to get the uppercase alphabet\n if char.isalnum() or char == ' ': # Preserve spaces, and ignore special characters such as punctuation etc.\n new_string += char # Append capitalized characters and spaces to the new string\n return new_string # return the capitalized string", "def convert_to_uppercase(text):\n return text.upper()", "def upper(self):\n if not self._upper_string:\n self._upper_string = \"\".join(self.upper_word_list)\n return self._upper_string", "def UPPER(text):\n return text.upper()", "def to_upper(self, text):\n\t\treturn text.upper()", "def UCase(text):\n return text.upper()", "def upper(self) -> String:\n pass", "def question_8(upper_string: str) -> str:\n newString = []\n for i,n in enumerate(upper_string):\n if n.isupper():\n newString.append(str(i))\n return newString", "def upper_first_char(self,strz):\n\t\tif (len(strz) < 2) or no_upper: #no_upper list of element to no upper \n\t\t\treturn strz\n\t\telse:\n\t\t\treturn strz[0].upper()+strz[1:]", "def upper(value):\n return value.upper()", "def abbreviate(x: str) -> str:\n i = 0\n abbreviation: str = \"\"\n while i < len(x):\n if x[i].isupper():\n abbreviation += x[i]\n i += 1\n return abbreviation", "def capify(text):\n return text[0].upper() + text[1:]", "def upper(value): # Only one argument.\n return value.upper()", "def sep_upper(text):\n \n new_text = \"\"\n for letter in text:\n if letter.isupper():\n new_text += \" \" + letter\n else:\n new_text += letter\n \n return new_text", "def split_uppercase(word):\r\n final_word = ''\r\n for i in word:\r\n final_word += ' %s' % i if i.isupper() else i\r\n\r\n return final_word.strip()", "def task18_letter_replacement(text):\n if text and isinstance(text, str):\n new_text = []\n for char in text:\n new_char_index = ascii_lowercase.index(char) + 1\n new_char = ascii_lowercase[new_char_index]\n if new_char in 'aeiou':\n new_char = new_char.upper()\n new_text.append(new_char)\n return ''.join(new_text)\n else:\n raise ValueError", "def uppercase(str):\n \n return str.upper()", "def uppersnakecase(string):\n\n return uppercase(snakecase(string))", "def uppercase(string):\n\n return str(string).upper()", "def ReplaceCapitals(string):\n\n newString = \"\"\n for char in string:\n if char.isupper():\n newString += \"_%s\"%char.lower()\n else:\n newString += char\n\n return newString", "def __process_word(self, word):\n output = ''\n capitals = self.__capital_positions(word)\n c_index = 0\n\n for c in word:\n if c_index in capitals:\n output += c.upper()\n else:\n output += c.lower()\n\n c_index += 1\n\n return output", "def ReturnUpper(text):\n try:\n text = text.upper()\n return text\n except:\n pass", "def capitalize(result):\n\treturn result.upper()", "def firstupper(value):\n if not isinstance(value, (str, unicode,)): return value\n else: return mark_safe(value[0].upper() + value[1:])", "def capitalize(phrase):\n new_phrase = phrase[0].upper()\n \n # loop through the phrase starting from index 1\n # concat each letter from phrase to new_phrase\n for i in range(1, len(phrase)):\n new_phrase += phrase[i]\n\n return new_phrase", "def build_abbreviation(agency_name):\n abbreviation = ''\n for ch in agency_name:\n if ch in string.ascii_uppercase:\n abbreviation += ch\n 
return abbreviation", "def ladcased(normal):\r\n\r\n ladified = ''\r\n for i, c in enumerate(normal):\r\n ladified += c.lower() if (i % 2 == 0) else c.upper()\r\n\r\n return ladified", "def capify(word, reference):\n new_word = \"\"\n\n # First check whole word before char-by-char\n if reference.islower():\n return word.lower()\n elif reference.isupper():\n return word.upper()\n\n # Char-by-char checks\n for i, c in enumerate(reference):\n if c.isupper():\n new_word += word[i].upper()\n else:\n new_word += word[i]\n return new_word", "def tr_upper_to_lower(text):\n out = []\n for ch in text:\n if ch in tr_upper_to_lower_dict:\n out.append(tr_upper_to_lower_dict[ch])\n else:\n out.append(ch.lower())\n \n return \"\".join(out)", "def myfunc (some_str):\n new_string = str(some_str).lower()\n next_string = \"\"\n for i, v in enumerate(new_string):\n if i % 2 == 0:\n next_string += v.upper()\n else:\n next_string += v.lower()\n return next_string" ]
[ "0.7568085", "0.7175322", "0.7120905", "0.71035486", "0.7063931", "0.7037421", "0.7026546", "0.70182836", "0.6939869", "0.68284166", "0.6808109", "0.6803818", "0.6799995", "0.6797273", "0.6720664", "0.66864145", "0.66418535", "0.66384387", "0.66032445", "0.6589709", "0.6589281", "0.6584653", "0.6570046", "0.65401495", "0.6534109", "0.6508843", "0.6493433", "0.64917964", "0.6472694", "0.64534026" ]
0.74459803
1
Masks a string of numbers with "#" but leaves the last 4 digits
def str_mask():
    mask_input = input("Provide a long number. ")
    mask_length = len(mask_input)
    last4 = mask_input[-4:]
    mask = multiply(mask_length - 4, "#")
    masked_str = mask + last4
    print(masked_str)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clean_phone(number):\n numberlist = re.findall(\"\\d\",number)\n new_number = \"\".join(numberlist)\n if len(new_number) == 8:\n \tnew_number = \"010\" + new_number\n\tnew_number = new_number[-11:]\n\tif new_number.startswith('1'):\n\t\tnew_number = \"+86-\" + new_number\n\telse:\n\t\tnew_number = \"+86-10-\" + new_number[-8:]\n\treturn new_number", "def safe_number(self):\n mask = '*' * (len(self.account_number) - 4)\n return '{0}{1}'.format(mask, self.account_number[-4:])", "def replace_digits(text):\n text = re.sub('[0-9]', '5', text)\n return text", "def safe_number(self):\n mask = '*' * (len(self.card_number) - 4)\n return '{0}{1}'.format(mask, self.card_number[-4:])", "def number2patten(number, length):\n if length == 1:\n return NUMBER_TO_BASE[number]\n prefix_index = number // 4\n base = NUMBER_TO_BASE[number % 4]\n return number2patten(prefix_index, length - 1) + base", "def compact(number):\n number = clean(number).strip().replace(' ', '-').split('-')\n if len(number) == 4:\n # zero pad the different sections if they are found\n lengths = (2, 4, 7, 3)\n return ''.join(n.zfill(l) for n, l in zip(number, lengths))\n else:\n # otherwise zero pad the account type\n number = ''.join(number)\n return number[:13] + number[13:].zfill(3)", "def zeros_padding_to_number_digits(mystring):\n\treturn ''.join([format(int(x), '05d') if x.isdigit() else x for x in re.split(r'(\\d+)', mystring)])", "def refrm(s):\n s2 = s[5:10] + s[4] + s[0:4]\n return s2", "def number_to_pattern(number, length):\n\n idx = 'ACGT'\n pattern = ''\n while number > 0:\n pattern += idx[number % 4]\n number //= 4\n return idx[0] * (length - len(pattern)) + pattern[::-1]", "def evalute_number(dialed):\n if (len(dialed) == 11 or len(dialed) == 10) and str(dialed).startswith(\"0\"):\n # UK Number\n return \"+44%s\" % (dialed[1:])\n elif len(dialed) == 6:\n # Local Fishguard numbers\n return \"+441348%s\" % (dialed)\n return None", "def getMangledNum(self):\n return (\"X\" * (len(self.num)-4)) + self.num[-4:]", "def clean_numbers(text):\n return regex.sub(\"\\d+\", ' NUM', text)", "def compact(number):\n number = clean(number, ' ').upper().strip()\n if number.startswith('AL'):\n number = number[2:]\n if number.startswith('(AL)'):\n number = number[4:]\n return number", "def clean_code(code, lengte):\n return code.zfill(lengte)", "def tweet_clean_numbers(word):\n if not re.search(r'[0-9]+', word):\n return word\n if len(word)==4 and re.search(r'[0-9]{4}', word) and 1900 < int(word) < 2019:\n return word\n word = re.sub(r'^([0-9]|[\\+\\-%/\\*\\.:])+[0-9%/\\+\\*\\.x:]*$', '<number>', word)\n return word", "def _remove_area_code(phone):\n\n if not phone.startswith('+46'):\n return phone\n else:\n return '0' + phone[3:]", "def replace_digits(text):\n text = re.sub(r\"\\d+\", \"number\", text)\n \n return text", "def clean(number):\n digits = [c for c in number if c.isdigit()]\n if len(digits) == 11 and digits[0] == \"1\":\n return ''.join(digits[1:])\n elif len(digits) != 10:\n return \"0000000000\"\n else:\n return ''.join(digits)", "def fixNumber(sval):\n\n r, val = VALID_RE.match(sval.strip()).groups()\n parts = VALPARTS_RE.findall(val)\n dpart = parts.pop(-1)\n if parts:\n return (r or \"\") + \"\".join(parts) + \".\" + dpart\n return (r or \"\") + dpart", "def strip_leading_chars(val):\n for i, c in enumerate(val):\n if c in \"0123456789.\":\n return val[i:]\n return \"\"", "def phone(raw_phone):\n\n phone = raw_phone.replace('+33', '0')\n phone = '{} {} {} {} {}'.format(\n phone[0:2],\n phone[2:4],\n phone[4:6],\n 
phone[6:8],\n phone[8:10])\n return phone", "def zh_num2digit(string):\n for match in zh_nums_iter(string):\n num_str = match.group(0)\n digit_num = parse_zh_num(num_str)\n if digit_num is None:\n continue\n string = string.replace(num_str, str(digit_num), 1)\n return string", "def replace_numbers(text, replace_with=\"_NUMBER_\"):\n return RE_NUMBER.sub(replace_with, text)", "def transform(s):\r\n return 'digit ' + str(s)", "def patten2number(sequence):\n try:\n if len(sequence) == 0:\n return 0\n last_base = sequence[-1]\n prefix = sequence[:-1]\n return 4 * patten2number(prefix) + BASE_TO_NUMBER[last_base]\n except KeyError:\n raise ValueError('Not able to convert nucleotide: %s' % last_base)", "def mask_acct_no(column):\n return column.str.replace(r'\\d*\\*{3,}\\d*|\\d+(\\-\\d+){2,}', ' $ACCT_NO ')", "def zero_digits(s):\n return re.sub('\\d', '0', s)", "def replace_street(street):\r\n if isinstance(street, str):\r\n for rep in replacements:\r\n street = re.sub(rep, \"\", street)\r\n\r\n streetint = re.findall(r'\\d+', str(street))\r\n if len(streetint) > 0 and int(streetint[0]) < 100:\r\n street = int(streetint[0])\r\n\r\n if street < 10:\r\n street = '0' + str(street) + str(streetnums[str(street)])\r\n elif street < 14:\r\n street = str(street) + 'TH'\r\n else:\r\n street = str(street) + str(streetnums[str(street)[-1]])\r\n\r\n\r\n return street", "def first_four_last_four(seq):\n seq = seq[4:-4:2]\n return seq", "def padding_zeroes(number, length_string):\n return str(number).zfill(length_string)" ]
[ "0.67827255", "0.67305297", "0.67131793", "0.65890765", "0.64678043", "0.6460441", "0.64570034", "0.63391024", "0.6272115", "0.6179286", "0.615589", "0.61505246", "0.60001665", "0.59832764", "0.5979442", "0.5976167", "0.59505975", "0.5931693", "0.5921734", "0.5915949", "0.5902615", "0.5862172", "0.5849867", "0.5817672", "0.5817625", "0.581033", "0.58099616", "0.5807293", "0.5790461", "0.57903093" ]
0.7082807
0
Returns the Python source for this file, if it is available, or None if it is not.
def _read_source(self):
    if self.fileType == FTPythonCompiled or \
       self.fileType == FTCompiledModule:
        return None

    filename = Filename(self.filename)
    filename.setExtension('py')
    try:
        file = open(filename, 'rU')
    except IOError:
        return None
    return file.read()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getsourcefile(object):\r\n filename = getfile(object)\r\n if string.lower(filename[-4:]) in ('.pyc', '.pyo'):\r\n filename = filename[:-4] + '.py'\r\n for suffix, mode, kind in imp.get_suffixes():\r\n if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:\r\n # Looks like a binary file. We want to only return a text file.\r\n return None\r\n if os.path.exists(filename):\r\n return filename\r\n # only return a non-existent filename if the module has a PEP 302 loader\r\n if hasattr(getmodule(object, filename), '__loader__'):\r\n return filename\r\n # or it is in the linecache\r\n if filename in linecache.cache:\r\n return filename", "def _read_code(self):\n\n if self.fileType == FTPythonCompiled:\n # It's a pyc file; just read it directly.\n pycVfile = vfs.getFile(self.filename, False)\n if pycVfile:\n return self._loadPyc(pycVfile, None)\n return None\n\n elif self.fileType == FTCompiledModule:\n return None\n\n # It's a .py file (or an __init__.py file; same thing). Read\n # the .pyc file if it is available and current; otherwise read\n # the .py file and compile it.\n pycFilename = Filename(self.filename)\n pycFilename.setExtension(pycExtension)\n pycVfile = vfs.getFile(pycFilename, False)\n t_pyc = None\n if pycVfile:\n t_pyc = pycVfile.getTimestamp()\n\n code = None\n if t_pyc and t_pyc >= self.timestamp:\n code = self._loadPyc(pycVfile, self.timestamp)\n\n if not code:\n source = self._read_source()\n filename = Filename(self.filename)\n filename.setExtension('py')\n code = self._compile(filename, source)\n\n return code", "def source(self):\n return some.dap.source(py.path.local(self.co_filename))", "def get_main_source(self):\n\t\tpath, loader = self.main\n\t\tif path is not None:\n\t\t\treturn loader.get_source(path)", "def _getscriptsource(self):\n\t\tscriptname = misc.sysinfo.script_name.rstrip(\"c\")\n\t\ttry:\n\t\t\tencoding = tokenize.detect_encoding(open(scriptname, \"rb\").readline)[0]\n\t\t\twith open(scriptname, \"r\", encoding=encoding, errors=\"replace\") as f:\n\t\t\t\tself.source = f.read()\n\t\texcept IOError: # Script might have called ``os.chdir()`` before\n\t\t\tself.source = None", "def get_source(self) -> Optional[str]:\n return self._source", "def get_source(self, fullmodname):\n submodname, is_package, fullpath, source = self._get_source(fullmodname)\n return source", "def code(self):\n code = self._code\n if code is None:\n raise IOError('source code not available')\n return code", "def code(self):\n code = self._code\n if code is None:\n raise IOError('source code not available')\n return code", "def get_source_file(self):\n return self.get_attribute(\"source_file\")", "def get_source(self):\n\t\treturn self.source.get_source()", "def _get_source(self, fullmodname):\n submodname, is_package, relpath = self._get_info(fullmodname)\n fullpath = self.path_entry + relpath\n source = self.datablocks[relpath]\n if hasattr(source, \"decode\"):\n source = source.decode(\"UTF-8\")\n source = source.replace('\\r\\n', '\\n')\n source = source.replace('\\r', '\\n')\n return submodname, is_package, fullpath, source", "def source(self):\n return self._source_code", "def _read_sourced_path(self, line):\n # type: (str)->tp.Optional[str]\n if line.startswith('source '):\n sline = [x.strip() for x in line.split()]\n sline.pop(0)\n path = ' '.join(sline)\n if not os.path.isabs(path):\n current_root = self._root_interfaces_path\n if os.path.isfile(current_root):\n current_root = os.path.dirname(current_root)\n path = os.path.join(current_root, path)\n return 
path\n return None", "def get_source():\n if len(sys.argv) > 1:\n return open(sys.argv[1])\n else:\n return sys.stdin", "def GetSrc():\n return os.path.abspath(os.path.join(_THIS_DIR, os.pardir, os.pardir,\n os.pardir))", "def get_source (self, name):\n containment = self.containments.get (name)\n if containment is None:\n raise ImportError ('No such module: \\'{}\\''.format (name))\n return (containment [0] if sys.version_info [0] > 2 else\n containment [0].encode ('utf-8'))", "def get_code(self, fullname):\n self.__get_module(fullname) # eventually raises ImportError\n return None", "def source(self) -> str | Path:\n return self._source", "def _get_source_path(self, docmeta: DocMetadata) -> Optional[str]:\n identifier = docmeta.arxiv_identifier\n version = docmeta.version\n file_noex = identifier.filename\n if not docmeta.is_latest:\n parent_path = self._get_parent_path(identifier, version)\n file_noex = f'{file_noex}v{version}'\n else:\n parent_path = self._get_parent_path(identifier)\n\n for extension in VALID_SOURCE_EXTENSIONS:\n possible_path = os.path.join(\n parent_path,\n f'{file_noex}{extension[0]}')\n if os.path.isfile(possible_path):\n return possible_path\n return None", "def source(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"source\")", "def get_src(self):\n return self.isy.prog_get_src(self._mydict['id'])", "def source_path(self, rel_path):\n # TODO download to disk (use source cache) in case of remote file\n import inspect\n\n frame = inspect.currentframe().f_back\n calling_file = frame.f_code.co_filename\n\n if (\n self.included_stack\n and calling_file == self.included_stack[-1].get_path_or_uri()\n ):\n # called from current snakefile, we can try to keep the original source\n # file annotation\n # This will only work if the method is evaluated during parsing mode.\n # Otherwise, the stack can be empty already.\n path = self.current_basedir.join(rel_path)\n orig_path = path.get_path_or_uri()\n else:\n # heuristically determine path\n calling_dir = os.path.dirname(calling_file)\n path = smart_join(calling_dir, rel_path)\n orig_path = path\n\n return sourcecache_entry(\n self.sourcecache.get_path(infer_source_file(path)), orig_path\n )", "def _get_source(self, uri: str) -> Optional[_Source]:\n\n for source in self._sources:\n if uri == source.uri:\n return source\n\n return None", "def __fetch_remote_source(self):\n # type: () -> Union(Git, None)\n if self.source == 'git':\n return self.git_source_class(**self.configuration).fetch()\n return None", "def getSource():", "def _get_scripts_resource(pe):\n return next(\n (\n entry.directory.entries[0].directory.entries[0]\n for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries\n if entry.name and entry.name.string == b\"PYTHONSCRIPT\"\n ),\n None,\n )", "def findsource(obj):\n filename = inspect.getsourcefile(obj)\n if filename:\n linecache.checkcache(filename)\n return inspect.findsource(obj)", "def source(self) -> Optional[str]:\n return pulumi.get(self, \"source\")", "def read_source(self, env: BuildEnvironment) -> str:\n content = self.source.read()\n\n # emit \"source-read\" event\n arg = [content]\n env.events.emit('source-read', env.docname, arg)\n return arg[0]" ]
[ "0.71008605", "0.7004", "0.69255656", "0.6722575", "0.67026246", "0.67018044", "0.65404385", "0.6464617", "0.6464617", "0.64623195", "0.64467514", "0.6408846", "0.62653047", "0.6264861", "0.6234833", "0.6153612", "0.608844", "0.605979", "0.60058707", "0.6003805", "0.60012", "0.59808594", "0.5978375", "0.5893533", "0.5888467", "0.58847976", "0.5881488", "0.5861391", "0.5850097", "0.58476114" ]
0.77578664
0
Returns the Python compiled code object for this file, if it is available, or None if it is not.
def _read_code(self):

    if self.fileType == FTPythonCompiled:
        # It's a pyc file; just read it directly.
        pycVfile = vfs.getFile(self.filename, False)
        if pycVfile:
            return self._loadPyc(pycVfile, None)
        return None

    elif self.fileType == FTCompiledModule:
        return None

    # It's a .py file (or an __init__.py file; same thing). Read
    # the .pyc file if it is available and current; otherwise read
    # the .py file and compile it.
    pycFilename = Filename(self.filename)
    pycFilename.setExtension(pycExtension)
    pycVfile = vfs.getFile(pycFilename, False)
    t_pyc = None
    if pycVfile:
        t_pyc = pycVfile.getTimestamp()

    code = None
    if t_pyc and t_pyc >= self.timestamp:
        code = self._loadPyc(pycVfile, self.timestamp)

    if not code:
        source = self._read_source()
        filename = Filename(self.filename)
        filename.setExtension('py')
        code = self._compile(filename, source)

    return code
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def code(self):\r\n if (os.path.exists(self._bytecode_path) and\r\n os.path.getmtime(self.full_path) <= os.path.getmtime(self._bytecode_path)):\r\n with open(self._bytecode_path, 'rb') as bytecode:\r\n return marshal.load(bytecode)\r\n else:\r\n with open(self.full_path, 'rb') as source:\r\n code = compile(source.read(), self.full_path, 'exec')\r\n with open(self._bytecode_path, 'wb') as bytecode:\r\n marshal.dump(code, bytecode)\r\n return code", "def _get_codeobj(pyfile):\n from imp import PY_COMPILED, PY_SOURCE\n\n result, fileobj, fullpath = _check_if_pyc(pyfile)\n\n # WARNING:\n # fp.read() can blowup if the module is extremely large file.\n # Lookout for overflow errors.\n try:\n data = fileobj.read()\n finally:\n fileobj.close()\n\n # This is a .pyc file. Treat accordingly.\n if result is PY_COMPILED:\n # .pyc format is as follows:\n # 0 - 4 bytes: Magic number, which changes with each create of .pyc file.\n # First 2 bytes change with each marshal of .pyc file. Last 2 bytes is \"\\r\\n\".\n # 4 - 8 bytes: Datetime value, when the .py was last changed.\n # 8 - EOF: Marshalled code object data.\n # So to get code object, just read the 8th byte onwards till EOF, and\n # UN-marshal it.\n import marshal\n code_obj = marshal.loads(data[8:])\n\n elif result is PY_SOURCE:\n # This is a .py file.\n code_obj = compile(data, fullpath, 'exec')\n\n else:\n # Unsupported extension\n raise Exception(\"Input file is unknown format: {0}\".format(fullpath))\n\n # Return code object\n return code_obj", "def _read_source(self):\n \n if self.fileType == FTPythonCompiled or \\\n self.fileType == FTCompiledModule:\n return None\n \n filename = Filename(self.filename)\n filename.setExtension('py')\n try:\n file = open(filename, 'rU')\n except IOError:\n return None\n return file.read()", "def code(self):\n code = self._code\n if code is None:\n raise IOError('source code not available')\n return code", "def code(self):\n code = self._code\n if code is None:\n raise IOError('source code not available')\n return code", "def get_code (self, name):\n containment = self.containments.get (name)\n if containment is None:\n raise ImportError ('No such module: \\'{}\\''.format (name))\n return compile (containment [0], containment [1], 'exec')", "def compile(self):\n return None # pragma: no cover", "def get_code(self, fullmodname):\n submodname, is_package, fullpath, source = self._get_source(fullmodname)\n return compile(source, fullpath, 'exec')", "def _compile(self, source: str, filename: str) -> CodeType:\n return compile(source, filename, \"exec\") # type: ignore", "def get_code(self, fullname):\n\t\tsource_path = self.get_filename(fullname)\n\t\tsource_mtime = None\n\t\ttry:\n\t\t\tbytecode_path = cache_from_source(source_path)\n\t\texcept NotImplementedError:\n\t\t\tbytecode_path = None\n\t\telse:\n\t\t\ttry:\n\t\t\t\tst = self.path_stats(source_path)\n\t\t\texcept NotImplementedError:\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tsource_mtime = int(st['mtime'])\n\t\t\t\ttry:\n\t\t\t\t\tdata = self.get_data(bytecode_path)\n\t\t\t\texcept IOError:\n\t\t\t\t\tpass\n\t\t\t\telse:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tbytes_data = self._bytes_from_bytecode(fullname, data,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t bytecode_path,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t st)\n\t\t\t\t\texcept (ImportError, EOFError):\n\t\t\t\t\t\tpass\n\t\t\t\t\telse:\n\t\t\t\t\t\t_verbose_message('{} matches {}', bytecode_path,\n\t\t\t\t\t\t\t\t\t\tsource_path)\n\t\t\t\t\t\tfound = marshal.loads(bytes_data)\n\t\t\t\t\t\tif isinstance(found, 
_code_type):\n\t\t\t\t\t\t\t_imp._fix_co_filename(found, source_path)\n\t\t\t\t\t\t\t_verbose_message('code object from {}',\n\t\t\t\t\t\t\t\t\t\t\tbytecode_path)\n\t\t\t\t\t\t\treturn found\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tmsg = \"Non-code object in {}\"\n\t\t\t\t\t\t\traise ImportError(msg.format(bytecode_path),\n\t\t\t\t\t\t\t\t\t\t\t name=fullname, path=bytecode_path)\n\t\tsource_bytes = self.get_data(source_path)\n\t\tcode_object = self.source_to_code(source_bytes, source_path)\n\t\t_verbose_message('code object from {}', source_path)\n\t\tif (not sys.dont_write_bytecode and bytecode_path is not None and\n\t\t\tsource_mtime is not None):\n\t\t\tdata = bytearray(_MAGIC_BYTES)\n\t\t\tdata.extend(_w_long(source_mtime))\n\t\t\tdata.extend(_w_long(len(source_bytes)))\n\t\t\tdata.extend(marshal.dumps(code_object))\n\t\t\ttry:\n\t\t\t\tself._cache_bytecode(source_path, bytecode_path, data)\n\t\t\t\t_verbose_message('wrote {!r}', bytecode_path)\n\t\t\texcept NotImplementedError:\n\t\t\t\tpass\n\t\treturn code_object", "def getCompiled(self):\n if self.isCompiled():\n return self.program\n else:\n raise Exception(\"el programa no ha sido compilado aun\")", "def get_code(self, fullname: str) -> Any:\n path = self.get_filename(fullname)\n data = self.get_data(path)\n\n # print(\"XPYCE FileLoader Fullname: {}, path from get_filename is {}\".format(fullname, path))\n # It is important to normalize path case for platforms like Windows\n decryption_key = None\n for prefix in PREFIXES:\n if decryption_key:\n break\n lookup_module = relpath(path, start=prefix).replace('/', '.').replace('\\\\', '.')\n for module in XPYCEPathFinder.KEYS:\n if module == lookup_module:\n decryption_key = XPYCEPathFinder.KEYS[module]\n break\n if not decryption_key:\n raise KeyError(\"Cannot find decryption_key for module '{}'\".format(fullname))\n\n try:\n data = decrypt(data, decryption_key)\n except Exception as e:\n print(\"Could not decrypt module '{}' with provided decryption_key\".format(fullname))\n raise e\n\n # .pyc changed from 3 32-bit words to 4 32-bit words with Python3.7\n # Skip over the header to get to the raw data\n\n if sys.version_info.minor < 7:\n bytes_data = data[12:]\n else:\n bytes_data = data[16:]\n\n return _compile_bytecode(bytes_data, name=fullname, bytecode_path=path)", "def get_code(self, fullname):\n source_path = self.get_filename(fullname)\n source_bytes = self.get_data(source_path)\n return compile(source_bytes, source_path, 'exec',\n dont_inherit=True)", "def get_code(self, fullname):\n self.__get_module(fullname) # eventually raises ImportError\n return None", "def _compile(self, filename, source):\n \n if source and source[-1] != '\\n':\n source = source + '\\n'\n code = __builtin__.compile(source, filename.cStr(), 'exec')\n\n # try to cache the compiled code\n pycFilename = Filename(filename)\n pycFilename.setExtension(pycExtension)\n try:\n f = open(pycFilename, 'wb')\n except IOError:\n pass\n else:\n f.write('\\0\\0\\0\\0')\n f.write(struct.pack('<I', self.timestamp))\n f.write(marshal.dumps(code))\n f.flush()\n f.seek(0, 0)\n f.write(imp.get_magic())\n f.close()\n\n return code", "def _loadPyc(self, vfile, timestamp):\n code = None\n f = open(vfile, 'rb')\n if f.read(4) == imp.get_magic():\n t = struct.unpack('<I', f.read(4))[0]\n if not timestamp or t == timestamp:\n code = marshal.loads(f.read())\n f.close()\n return code", "def get_compiler() -> XMLSnippetCompiler:\n THREAD_LOCALS = access_thread_locals()\n try:\n compiler = 
THREAD_LOCALS.XMLSnippet_00000001_compiler_singleton\n except AttributeError:\n THREAD_LOCALS.XMLSnippet_00000001_compiler_singleton = XMLSnippetCompiler()\n compiler = THREAD_LOCALS.XMLSnippet_00000001_compiler_singleton\n return compiler", "def getsourcefile(object):\r\n filename = getfile(object)\r\n if string.lower(filename[-4:]) in ('.pyc', '.pyo'):\r\n filename = filename[:-4] + '.py'\r\n for suffix, mode, kind in imp.get_suffixes():\r\n if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:\r\n # Looks like a binary file. We want to only return a text file.\r\n return None\r\n if os.path.exists(filename):\r\n return filename\r\n # only return a non-existent filename if the module has a PEP 302 loader\r\n if hasattr(getmodule(object, filename), '__loader__'):\r\n return filename\r\n # or it is in the linecache\r\n if filename in linecache.cache:\r\n return filename", "def source(self):\n return some.dap.source(py.path.local(self.co_filename))", "def get_code(entry_point, computer):\n\n try:\n executable = executables[entry_point]\n except KeyError:\n raise KeyError(\n \"Entry point '{}' not recognized. Allowed values: {}\".format(\n entry_point, list(executables.keys())))\n\n codes = Code.objects.find(filters={'label': executable}) # pylint: disable=no-member\n if codes:\n return codes[0]\n\n path = get_path_to_executable(executable)\n code = Code(\n input_plugin_name=entry_point,\n remote_computer_exec=[computer, path],\n )\n code.label = executable\n return code.store()", "def get_compiler() -> XMLCompiler:\n THREAD_LOCALS = access_thread_locals()\n try:\n compiler = THREAD_LOCALS.XML_00000001_compiler_singleton\n except AttributeError:\n THREAD_LOCALS.XML_00000001_compiler_singleton = XMLCompiler()\n compiler = THREAD_LOCALS.XML_00000001_compiler_singleton\n return compiler", "def cache(self, code, *args, **kwargs):\n try:\n compiled = memoized_parse_block(code)\n except CoconutException:\n logger.display_exc()\n return None\n else:\n return super(CoconutCompiler, self).cache(compiled, *args, **kwargs)", "def get_compiler() -> EBNFCompiler:\n THREAD_LOCALS = access_thread_locals()\n try:\n compiler = THREAD_LOCALS.EBNF_00000001_compiler_singleton\n except AttributeError:\n THREAD_LOCALS.EBNF_00000001_compiler_singleton = EBNFCompiler()\n compiler = THREAD_LOCALS.EBNF_00000001_compiler_singleton\n return compiler", "def get_runtime(self):\n import io\n from ..api import asm\n\n asm_src = \"\"\n return asm(io.StringIO(asm_src), self)", "def compile(self, workdir):\n with open(workdir) as f:\n ast = self.parser.generate_ast(f.read())\n f.close()\n\n return None", "def extract_code_objects(pe):\n script_res = _get_scripts_resource(pe)\n dump = _resource_dump(pe, script_res)\n return _get_co_from_dump(dump)", "def compileModule(self, code):\n r = ast.Module(None, self.compileSuite(code))\n #print r\n return r", "def source(self):\n return self._source_code", "def get_code(func):\n import inspect\n\n raw = \"\".join(inspect.getsource(func))\n found = re.findall(\"(k = .*)\", raw)\n\n if any(found):\n code = found[0]\n return code\n else:\n return \"\"", "def dafile_to_pycode(filename, args=None, _optimize=-1, dfile=None):\n pyast = dafile_to_pyast(filename, args)\n if pyast is not None:\n return _pyast_to_pycode(pyast,\n dfile if dfile is not None else filename,\n _optimize)\n else:\n return None" ]
[ "0.75335735", "0.7469139", "0.7044674", "0.67842853", "0.67842853", "0.6689265", "0.66375077", "0.6616381", "0.6603935", "0.65416414", "0.6507916", "0.65020955", "0.64757884", "0.64366883", "0.6418413", "0.6334043", "0.623301", "0.61632675", "0.6077583", "0.5995937", "0.5955363", "0.59236664", "0.59199923", "0.5882447", "0.58471924", "0.57451725", "0.57410145", "0.573484", "0.56826794", "0.5659665" ]
0.8227531
0
Reads and returns the marshal data from a .pyc file.
def _loadPyc(self, vfile, timestamp):
    code = None
    f = open(vfile, 'rb')
    if f.read(4) == imp.get_magic():
        t = struct.unpack('<I', f.read(4))[0]
        if not timestamp or t == timestamp:
            code = marshal.loads(f.read())
    f.close()
    return code
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_co_from_dump(data):\n # Read py2exe header\n current = struct.calcsize(b'iiii')\n metadata = struct.unpack(b'iiii', data[:current])\n\n # check py2exe magic number\n # assert(metadata[0] == 0x78563412)\n logging.info(\"Magic value: %x\", metadata[0])\n logging.info(\"Code bytes length: %d\", metadata[3])\n\n arcname = ''\n while six.indexbytes(data, current) != 0:\n arcname += chr(six.indexbytes(data, current))\n current += 1\n logging.info(\"Archive name: %s\", arcname or '-')\n\n code_bytes = data[current + 1:]\n return marshal.loads(code_bytes)", "def _get_codeobj(pyfile):\n from imp import PY_COMPILED, PY_SOURCE\n\n result, fileobj, fullpath = _check_if_pyc(pyfile)\n\n # WARNING:\n # fp.read() can blowup if the module is extremely large file.\n # Lookout for overflow errors.\n try:\n data = fileobj.read()\n finally:\n fileobj.close()\n\n # This is a .pyc file. Treat accordingly.\n if result is PY_COMPILED:\n # .pyc format is as follows:\n # 0 - 4 bytes: Magic number, which changes with each create of .pyc file.\n # First 2 bytes change with each marshal of .pyc file. Last 2 bytes is \"\\r\\n\".\n # 4 - 8 bytes: Datetime value, when the .py was last changed.\n # 8 - EOF: Marshalled code object data.\n # So to get code object, just read the 8th byte onwards till EOF, and\n # UN-marshal it.\n import marshal\n code_obj = marshal.loads(data[8:])\n\n elif result is PY_SOURCE:\n # This is a .py file.\n code_obj = compile(data, fullpath, 'exec')\n\n else:\n # Unsupported extension\n raise Exception(\"Input file is unknown format: {0}\".format(fullpath))\n\n # Return code object\n return code_obj", "def extract_code(self, data):\n current = struct.calcsize(b'iiii')\n metadata = struct.unpack(b'iiii', data[:current])\n\n if metadata[0] != 0x78563412:\n raise InvalidPy2ExeFile(\"Invalid PYTHONSCRIPT header\")\n\n arcname = ''\n while six.indexbytes(data, current) != 0:\n arcname += chr(six.indexbytes(data, current))\n current += 1\n code_bytes = data[current + 1:]\n code_objects = marshal.loads(code_bytes)\n return code_objects", "def _read_code(self):\n\n if self.fileType == FTPythonCompiled:\n # It's a pyc file; just read it directly.\n pycVfile = vfs.getFile(self.filename, False)\n if pycVfile:\n return self._loadPyc(pycVfile, None)\n return None\n\n elif self.fileType == FTCompiledModule:\n return None\n\n # It's a .py file (or an __init__.py file; same thing). 
Read\n # the .pyc file if it is available and current; otherwise read\n # the .py file and compile it.\n pycFilename = Filename(self.filename)\n pycFilename.setExtension(pycExtension)\n pycVfile = vfs.getFile(pycFilename, False)\n t_pyc = None\n if pycVfile:\n t_pyc = pycVfile.getTimestamp()\n\n code = None\n if t_pyc and t_pyc >= self.timestamp:\n code = self._loadPyc(pycVfile, self.timestamp)\n\n if not code:\n source = self._read_source()\n filename = Filename(self.filename)\n filename.setExtension('py')\n code = self._compile(filename, source)\n\n return code", "def get_data(self, filename):\n pyfile = filename + '.py'\n if not os.path.exists(pyfile) or os.path.getmtime(pyfile) < os.path.getmtime(filename):\n # recompile the Haxe file\n status = subprocess.call([self.haxe_bin, '-cp', os.path.dirname(filename), os.path.basename(filename), '-python', pyfile])\n if status:\n raise ImportError(\"Haxe compilation of {} failed with status {}\".format(filename, status))\n with open(pyfile) as f:\n data = f.read()\n return data", "def load_clips():\n try:\n with open(DATA_FILE, 'r') as f:\n return msgpack.unpack(f, encoding='utf-8')\n except IOError:\n return {}", "def get_code(self, fullname: str) -> Any:\n path = self.get_filename(fullname)\n data = self.get_data(path)\n\n # print(\"XPYCE FileLoader Fullname: {}, path from get_filename is {}\".format(fullname, path))\n # It is important to normalize path case for platforms like Windows\n decryption_key = None\n for prefix in PREFIXES:\n if decryption_key:\n break\n lookup_module = relpath(path, start=prefix).replace('/', '.').replace('\\\\', '.')\n for module in XPYCEPathFinder.KEYS:\n if module == lookup_module:\n decryption_key = XPYCEPathFinder.KEYS[module]\n break\n if not decryption_key:\n raise KeyError(\"Cannot find decryption_key for module '{}'\".format(fullname))\n\n try:\n data = decrypt(data, decryption_key)\n except Exception as e:\n print(\"Could not decrypt module '{}' with provided decryption_key\".format(fullname))\n raise e\n\n # .pyc changed from 3 32-bit words to 4 32-bit words with Python3.7\n # Skip over the header to get to the raw data\n\n if sys.version_info.minor < 7:\n bytes_data = data[12:]\n else:\n bytes_data = data[16:]\n\n return _compile_bytecode(bytes_data, name=fullname, bytecode_path=path)", "def dump_to_pyc(co, python_version, output_dir):\n # assume Windows path information from the .exe\n pyc_basename = ntpath.basename(co.co_filename)\n pyc_name = f'{pyc_basename}.pyc'\n\n if pyc_name not in IGNORE:\n logging.info(\"Extracting %s\", pyc_name)\n pyc_header = _generate_pyc_header(python_version, len(co.co_code))\n destination = os.path.join(output_dir, pyc_name)\n with open(destination, 'wb') as pyc:\n pyc.write(pyc_header)\n marshaled_code = marshal.dumps(co)\n pyc.write(marshaled_code)\n else:\n logging.info(\"Skipping %s\", pyc_name)", "def _read_source(self):\n \n if self.fileType == FTPythonCompiled or \\\n self.fileType == FTCompiledModule:\n return None\n \n filename = Filename(self.filename)\n filename.setExtension('py')\n try:\n file = open(filename, 'rU')\n except IOError:\n return None\n return file.read()", "def get_binary(fname):\n with open(fname, 'rb+') as f_name:\n data = f_name.read()\n return data", "def dump_pyc(self, co, output_dir):\n pyc_basename = ntpath.basename(co.co_filename)\n if pyc_basename in self.ignore:\n return\n pyc_name = pyc_basename + '.pyc'\n\n # Rebuild PYC header\n version = imp.get_magic()\n version_tuple = sys.version_info\n today = time.time()\n header = version 
+ struct.pack(b'=L', int(today))\n if version_tuple[0] == 3 and version_tuple[1] >= 3:\n header += struct.pack(b'=L', len(co.co_code))\n\n # Write to file\n destination = os.path.join(output_dir, pyc_name)\n pyc = open(destination, 'wb')\n pyc.write(header)\n marshaled_code = marshal.dumps(co)\n pyc.write(marshaled_code)\n pyc.close()\n return destination", "def _code_to_file(co):\n if sys.version_info >= (3, 7):\n header = imp.get_magic() + (b'\\0' * 12)\n elif sys.version_info >= (3, 4):\n header = imp.get_magic() + (b'\\0' * 8)\n else:\n header = imp.get_magic() + (b'\\0' * 4)\n return BytesIO(header + marshal.dumps(co))", "def readbytes(self, *args) -> \"PyObject *\":\n return _ida_fpro.qfile_t_readbytes(self, *args)", "def read_pc_file(filename, global_variables):\n ErrorPrinter().set_variable('filename', filename)\n ErrorPrinter().debug_print('Parsing %(filename)')\n pcfile = open(filename, 'r')\n lines = pcfile.readlines()\n if not lines:\n raise EmptyPackageFileError(filename)\n raw_vars, vars, props = parse_pc_file_lines(lines, global_variables)\n pcfile.close()\n return raw_vars, vars, props", "def open_ponto(fname):\n MAGIC = 7856871\n fobj = open(fname, 'rb')\n\n # read the format code\n fbytes = fobj.read(4)\n (fcode,) = struct.unpack('i',fbytes)\n if fcode != MAGIC:\n (fcode,) = struct.unpack('>i',fbytes)\n if fcode != MAGIC:\n fobj.close()\n raise CppError('open_ponto: could not recognise first 4 bytes of ' + fname + ' as a Ponto file')\n endian = '>'\n else:\n endian = ''\n return (fobj,endian)", "def load_data(self):\n return self._load_data(\"--codemeta-file\")", "def code(self):\r\n if (os.path.exists(self._bytecode_path) and\r\n os.path.getmtime(self.full_path) <= os.path.getmtime(self._bytecode_path)):\r\n with open(self._bytecode_path, 'rb') as bytecode:\r\n return marshal.load(bytecode)\r\n else:\r\n with open(self.full_path, 'rb') as source:\r\n code = compile(source.read(), self.full_path, 'exec')\r\n with open(self._bytecode_path, 'wb') as bytecode:\r\n marshal.dump(code, bytecode)\r\n return code", "def _ReadCoverageInfoEntry(data_file):\n\n UINT32_SIZE = 4\n\n pkt_size_buf = data_file.read(UINT32_SIZE)\n if len(pkt_size_buf) != UINT32_SIZE:\n raise ValueError(\"Invalid packet size read.\")\n\n pkt_size = struct.unpack(\"I\", pkt_size_buf)[0]\n\n pkt = data_file.read(pkt_size)\n\n if len(pkt) != pkt_size:\n raise ValueError(\"Incomplete packet.\")\n\n return pkt", "def get_dll():\r\n with open(\"TcpServer.dll\", \"rb\") as m_file:\r\n return m_file.read()", "def read_symbols(filename):\n filename = as_str(filename)\n cdef ifstream* fstream = new ifstream(filename)\n cdef SymbolTable table = SymbolTable.__new__(SymbolTable)\n table.table = sym.SymbolTableRead(fstream[0], filename)\n del fstream\n return table", "def openFileRd(in_file):\n try:\n # First see if this file is gzipped\n try:\n # Opening the file works even if it is not a gzip file\n proto_in = gzip.open(in_file, \"rb\")\n\n # Force a check of the magic number by seeking in the\n # file. 
If we do not do it here the error will occur when\n # reading the first message.\n proto_in.seek(1)\n proto_in.seek(0)\n except IOError:\n proto_in = open(in_file, \"rb\")\n except IOError:\n print(\"Failed to open \", in_file, \" for reading\")\n exit(-1)\n return proto_in", "def readLib(self):\n\t\tdata = self._fileSystem.readLib()\n\t\tif data is None:\n\t\t\treturn\n\t\treturn data", "def load_python_data(path):\n data = []\n with codecs.open(path, encoding='UTF-8', mode='r') as fi:\n for line in fi:\n data.append(eval(line))\n return data", "def read_file(self, filename):\n import pycbf\n self.cbf_handle = pycbf.cbf_handle_struct()\n self.cbf_handle.read_file(filename, pycbf.MSG_DIGEST)\n self.cbf_handle.rewind_datablock()", "def getBinaryData(self, filepath):\n\n binary_values = []\n\n with open(filepath, 'rb') as fileobject:\n\n # read file byte by byte\n data = fileobject.read(1)\n\n while data != b'':\n binary_values.append(ord(data))\n data = fileobject.read(1)\n\n return binary_values", "def read_data(cls, input_file,quotechar = None):\n if 'pkl' in str(input_file):\n lines = load_pickle(input_file)\n else:\n lines = input_file\n return lines", "def _read_pkl(self, input_file):\n data = pickle.load(open(input_file, 'rb'))\n return data", "def read_file(file_path):\n scan = nib.load(filename=file_path)\n scan = scan.get_fdata()\n return scan", "def read_data(self, content_path):\n\n if not os.path.basename(content_path).endswith(\".dat\"):\n raise ValueError(\"this content path is not a data file\")\n\n try:\n # read binary data\n data = self._zip_file.read(content_path)\n\n # decode using big-endian integer\n result = []\n for i in range(int(len(data) / 4)):\n result.append(unpack('!i', data[i * 4:(i + 1) * 4]))\n\n # returning integer-encoded raw data vector\n return np.array(result)\n except IOError:\n print(\"can't read data file\")", "def readin(pythonfilename):\n with open(pythonfilename) as f:\n code = f.read()\n FuncLister().visit(ast.parse(code))" ]
[ "0.65662557", "0.6384193", "0.6341848", "0.611631", "0.5743764", "0.57398134", "0.5732921", "0.56879", "0.55201936", "0.54984534", "0.54814845", "0.5436405", "0.54362696", "0.5424923", "0.540435", "0.53719556", "0.5361285", "0.5304752", "0.5267074", "0.52599674", "0.52564466", "0.5243898", "0.52294433", "0.522282", "0.52109474", "0.5206465", "0.51881576", "0.5185976", "0.5183237", "0.51673776" ]
0.6452861
1
Compiles the Python source code to a code object and attempts to write it to an appropriate .pyc file.
def _compile(self, filename, source):

    if source and source[-1] != '\n':
        source = source + '\n'
    code = __builtin__.compile(source, filename.cStr(), 'exec')

    # try to cache the compiled code
    pycFilename = Filename(filename)
    pycFilename.setExtension(pycExtension)
    try:
        f = open(pycFilename, 'wb')
    except IOError:
        pass
    else:
        f.write('\0\0\0\0')
        f.write(struct.pack('<I', self.timestamp))
        f.write(marshal.dumps(code))
        f.flush()
        f.seek(0, 0)
        f.write(imp.get_magic())
        f.close()

    return code
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _compile(self, source: str, filename: str) -> CodeType:\n return compile(source, filename, \"exec\") # type: ignore", "def pyo():\n local('python -O -m compileall .')", "def pyo():\n local('python -O -m compileall .')", "def _get_codeobj(pyfile):\n from imp import PY_COMPILED, PY_SOURCE\n\n result, fileobj, fullpath = _check_if_pyc(pyfile)\n\n # WARNING:\n # fp.read() can blowup if the module is extremely large file.\n # Lookout for overflow errors.\n try:\n data = fileobj.read()\n finally:\n fileobj.close()\n\n # This is a .pyc file. Treat accordingly.\n if result is PY_COMPILED:\n # .pyc format is as follows:\n # 0 - 4 bytes: Magic number, which changes with each create of .pyc file.\n # First 2 bytes change with each marshal of .pyc file. Last 2 bytes is \"\\r\\n\".\n # 4 - 8 bytes: Datetime value, when the .py was last changed.\n # 8 - EOF: Marshalled code object data.\n # So to get code object, just read the 8th byte onwards till EOF, and\n # UN-marshal it.\n import marshal\n code_obj = marshal.loads(data[8:])\n\n elif result is PY_SOURCE:\n # This is a .py file.\n code_obj = compile(data, fullpath, 'exec')\n\n else:\n # Unsupported extension\n raise Exception(\"Input file is unknown format: {0}\".format(fullpath))\n\n # Return code object\n return code_obj", "def compile_code():\n pyc_cmd = \"python3 ../course/common/compiler.py \"\n\n with open('log.out', 'w+', encoding=\"utf-8\") as f:\n subprocess.call(shlex.split(pyc_cmd + './student/'), universal_newlines=True, stderr=f)\n f.seek(0)\n out_student = f.read()\n\n if out_student != \"\":\n rawhtml = rst.get_codeblock(\"\", out_student)\n feedback.set_global_result('failed')\n feedback.set_global_feedback(_(\"Your program does not compile: \\n \") + rawhtml + \"\\n\")\n sys.exit(0)", "def code(self):\r\n if (os.path.exists(self._bytecode_path) and\r\n os.path.getmtime(self.full_path) <= os.path.getmtime(self._bytecode_path)):\r\n with open(self._bytecode_path, 'rb') as bytecode:\r\n return marshal.load(bytecode)\r\n else:\r\n with open(self.full_path, 'rb') as source:\r\n code = compile(source.read(), self.full_path, 'exec')\r\n with open(self._bytecode_path, 'wb') as bytecode:\r\n marshal.dump(code, bytecode)\r\n return code", "def dump_pyc(self, co, output_dir):\n pyc_basename = ntpath.basename(co.co_filename)\n if pyc_basename in self.ignore:\n return\n pyc_name = pyc_basename + '.pyc'\n\n # Rebuild PYC header\n version = imp.get_magic()\n version_tuple = sys.version_info\n today = time.time()\n header = version + struct.pack(b'=L', int(today))\n if version_tuple[0] == 3 and version_tuple[1] >= 3:\n header += struct.pack(b'=L', len(co.co_code))\n\n # Write to file\n destination = os.path.join(output_dir, pyc_name)\n pyc = open(destination, 'wb')\n pyc.write(header)\n marshaled_code = marshal.dumps(co)\n pyc.write(marshaled_code)\n pyc.close()\n return destination", "def _compile_C_code(header, body, return_unloaded=False, verbose=False):\n import importlib\n import tempfile\n import uuid\n\n import cffi\n\n module_name = \"module_\" + uuid.uuid4().hex\n\n if \"__uint128\" in header:\n raise ValueError(\"_compile_C_code does not support bit-vector widths \"\n \"larger than 64 bits (cffi does not support __uint128)\")\n\n ffibuilder = cffi.FFI()\n ffibuilder.cdef(header)\n ffibuilder.set_source(module_name, body)\n\n tmpdir = tempfile.TemporaryDirectory()\n lib_path = ffibuilder.compile(tmpdir=tmpdir.name, verbose=verbose)\n\n if return_unloaded:\n return lib_path, module_name, tmpdir\n\n # dynamic import\n # 
https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly\n spec = importlib.util.spec_from_file_location(module_name, lib_path)\n pymod_parent = importlib.util.module_from_spec(spec)\n # sys.modules[module_name] = module\n spec.loader.exec_module(pymod_parent)\n\n pymod = pymod_parent\n\n return pymod, tmpdir", "def dump_to_pyc(co, python_version, output_dir):\n # assume Windows path information from the .exe\n pyc_basename = ntpath.basename(co.co_filename)\n pyc_name = f'{pyc_basename}.pyc'\n\n if pyc_name not in IGNORE:\n logging.info(\"Extracting %s\", pyc_name)\n pyc_header = _generate_pyc_header(python_version, len(co.co_code))\n destination = os.path.join(output_dir, pyc_name)\n with open(destination, 'wb') as pyc:\n pyc.write(pyc_header)\n marshaled_code = marshal.dumps(co)\n pyc.write(marshaled_code)\n else:\n logging.info(\"Skipping %s\", pyc_name)", "def _pyCompile ( target, source, env ) :\n if len(target) != 1 :\n fail ( \"unexpected number of targets for pyCompile: \"+str(target) )\n if len(source) != 1 :\n fail ( \"unexpected number of sources for pyCompile: \"+str(source) )\n\n target = str(target[0])\n source = str(source[0])\n trace ( \"Executing pycompile `%s'\" % ( source ), \"pyCompile\", 3 )\n\n try :\n import py_compile\n py_compile.compile ( source, target, doraise = True )\n except py_compile.PyCompileError, e :\n print str(e)\n return -1", "def visit_Python(self, node):\n # This compiles the given Python ast into a Python code object\n # then disassembles it into a byteplay code object. This allows\n # us to interleave the instructions with those generated for\n # the rest of the module and then compile a single unified \n # code object.\n py_code = compile(node.py_ast, self.filename, mode='exec')\n bpc = Code.from_code(py_code)\n # Skip the SetLineo and ReturnValue codes\n self.code_ops.extend(bpc.code[1:-2])", "def source_to_code(self, data, path):\n\t\treturn _call_with_frames_removed(compile, data, path, 'exec', dont_inherit=True)", "def _make_source(name, init, body):\n code = \"\"\"\n #include <Python.h>\n\n %(body)s\n\n PyMODINIT_FUNC\n PyInit_%(name)s(void) {\n %(init)s\n }\n \"\"\" % dict(\n name=name, init=init, body=body,\n )\n return code", "def _code_to_file(co):\n if sys.version_info >= (3, 7):\n header = imp.get_magic() + (b'\\0' * 12)\n elif sys.version_info >= (3, 4):\n header = imp.get_magic() + (b'\\0' * 8)\n else:\n header = imp.get_magic() + (b'\\0' * 4)\n return BytesIO(header + marshal.dumps(co))", "async def _compile(ctx, code: Option(str, \"Brainfuck code to compile into python\")):\n compiled = bot.brainfuck.compile(code)\n await send_code(ctx, compiled.code, lang=\"py\")", "def write_c_source(self, dst):\n wfd = open(dst, \"wt\")\n wfd.write(self.generate_c_source())\n wfd.close()", "def get_code(self, fullmodname):\n submodname, is_package, fullpath, source = self._get_source(fullmodname)\n return compile(source, fullpath, 'exec')", "def get_code(self, fullname):\n source_path = self.get_filename(fullname)\n source_bytes = self.get_data(source_path)\n return compile(source_bytes, source_path, 'exec',\n dont_inherit=True)", "def compile_python(self):\n if(self.input == \"\"):\n stdout = subprocess.run(\n [\"python\", self.id+\".py\"], stdout=subprocess.PIPE).stdout.decode('utf-8')\n self.output = stdout\n if(len(stdout) == 0):\n self.output = subprocess.run(\n [\"python\", self.id+\".py\"], stderr=subprocess.PIPE).stderr.decode('utf-8')\n self.status = 0 # error\n else:\n self.status = 1 # success\n else:\n pass", 
"def dump_compiler(input_bytes):\n return dump_from_release(input_bytes, \"compiler\")", "def _read_code(self):\n\n if self.fileType == FTPythonCompiled:\n # It's a pyc file; just read it directly.\n pycVfile = vfs.getFile(self.filename, False)\n if pycVfile:\n return self._loadPyc(pycVfile, None)\n return None\n\n elif self.fileType == FTCompiledModule:\n return None\n\n # It's a .py file (or an __init__.py file; same thing). Read\n # the .pyc file if it is available and current; otherwise read\n # the .py file and compile it.\n pycFilename = Filename(self.filename)\n pycFilename.setExtension(pycExtension)\n pycVfile = vfs.getFile(pycFilename, False)\n t_pyc = None\n if pycVfile:\n t_pyc = pycVfile.getTimestamp()\n\n code = None\n if t_pyc and t_pyc >= self.timestamp:\n code = self._loadPyc(pycVfile, self.timestamp)\n\n if not code:\n source = self._read_source()\n filename = Filename(self.filename)\n filename.setExtension('py')\n code = self._compile(filename, source)\n\n return code", "def compile(self, code, options=''):\n try:\n data = self.client.cli.compile_contract(body=dict(\n code=code,\n options=options\n ))\n return data.bytecode\n except OpenAPIClientException as e:\n raise ContractError(e)", "def compile(path: str) -> bytes:\n if not path.endswith('.py'):\n raise InvalidPathException(path)\n\n return Compiler().compile(path)", "def visit_Python(self, node):\n py_code = compile(node.py_ast, self.filename, mode='exec')\n bp_code = Code.from_code(py_code)\n # Skip the SetLineo and ReturnValue codes\n self.code_ops.extend(bp_code.code[1:-2])", "def code_generate(env, script, target, source, command):\n\n # We're generating code using Python scripts, so we have to be\n # careful with our scons elements. This entry represents\n # the generator file *in the source directory*.\n script_src = env.File(script).srcnode()\n\n # This command creates generated code *in the build directory*.\n command = command.replace('$SCRIPT', script_src.path)\n code = env.Command(target, source, command)\n\n # Explicitly mark that the generated code depends on the generator,\n # and on implicitly imported python modules\n path = (script_src.get_dir(),)\n deps = [script_src]\n deps += script_src.get_implicit_deps(env, python_scanner, path)\n env.Depends(code, deps)\n\n # Running the Python script causes .pyc files to be generated in the\n # source directory. When we clean up, they should go too. 
So add side\n # effects for .pyc files\n for dep in deps:\n pyc = env.File(str(dep) + 'c')\n env.SideEffect(pyc, code)\n\n return code", "def compileModule(self, code):\n r = ast.Module(None, self.compileSuite(code))\n #print r\n return r", "def compileCode(pretext, codetext, filename):\n\n try:\n if codetext:\n co = compile(codetext, filename, \"exec\")\n o = [ pretext, co, codetext ]\n else:\n o = [ pretext, None, codetext ]\n except:\n o = [ pretext, None, codetext ]\n\n print >> sys.stderr, \\\n \"Error compiling template in the following code:\"\n print >> sys.stderr, codetext\n\n try:\n etype, value, tb = sys.exc_info()\n print_exception(etype, value, tb, None, sys.stderr)\n finally:\n etype = value = tb = None\n if not opts.ignore_errors:\n errors = 1\n\n print >> sys.stderr\n return o", "def compile_simple(py_ast, filename):\n code = compile(py_ast, filename, mode='eval')\n code = update_firstlineno(code, py_ast.lineno)\n bp_code = Code.from_code(code)\n replace_global_loads(bp_code.code)\n optimize_locals(bp_code.code)\n bp_code.newlocals = False\n return bp_code.to_code()", "def compile(self):\n return None # pragma: no cover", "def compile_c(self):\n if(self.input == \"\"):\n stderr = subprocess.run(\n [\"gcc\", self.id+\".c\", \"-o\", self.id+\"_c\"], stderr=subprocess.PIPE).stderr.decode('utf-8')\n if(len(stderr) == 0):\n self.status = 1\n stdout = subprocess.run(\n [\"./\"+self.id+\"_c\"], stdout=subprocess.PIPE).stdout.decode('utf-8')\n self.output = stdout\n else:\n self.status = 0\n self.output = stderr\n else:\n pass" ]
[ "0.7162829", "0.71481526", "0.71481526", "0.7095582", "0.7006457", "0.69402754", "0.6847345", "0.6822981", "0.6776377", "0.6746384", "0.6662565", "0.6629363", "0.65639025", "0.63957757", "0.6383215", "0.6376323", "0.6361882", "0.626", "0.62477124", "0.6155028", "0.6153046", "0.60825187", "0.6079766", "0.6078048", "0.6056936", "0.6051101", "0.6044784", "0.6039808", "0.6011722", "0.60031396" ]
0.7598696
0
Register the VFSImporter on the path_hooks, if it has not already been registered, so that future Python import statements will vector through here (and therefore will take advantage of Panda's virtual file system).
def register(): global _registered if not _registered: _registered = True sys.path_hooks.insert(0, VFSImporter)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _add_finder(importer, finder):\r\n\r\n existing_finder = _get_finder(importer)\r\n\r\n if not existing_finder:\r\n pkg_resources.register_finder(importer, finder)\r\n else:\r\n pkg_resources.register_finder(importer, ChainedFinder.of(existing_finder, finder))", "def register_finders():\r\n\r\n # If the previous finder is set, then we've already monkeypatched, so skip.\r\n global __PREVIOUS_FINDER\r\n if __PREVIOUS_FINDER:\r\n return\r\n\r\n # save previous finder so that it can be restored\r\n previous_finder = _get_finder(zipimport.zipimporter)\r\n assert previous_finder, 'This appears to be using an incompatible setuptools.'\r\n\r\n # replace the zip finder with our own implementation of find_eggs_in_zip which uses the correct\r\n # metadata handler, in addition to find_wheels_in_zip\r\n pkg_resources.register_finder(\r\n zipimport.zipimporter, ChainedFinder.of(find_eggs_in_zip, find_wheels_in_zip))\r\n\r\n # append the wheel finder\r\n _add_finder(pkgutil.ImpImporter, find_wheels_on_path)\r\n\r\n if importlib_bootstrap is not None:\r\n _add_finder(importlib_bootstrap.FileFinder, find_wheels_on_path)\r\n\r\n __PREVIOUS_FINDER = previous_finder", "def register(self):\n REGISTERED_FUNCTIONS[self.path] = self", "def predicated_path_hook_for_FileFinder(path):\n if not os.path.isdir(path):\n raise ImportError(\"only directories are supported\")\n if not predicate(path):\n raise ImportError(\"predicate not satisfied\")\n return cls(path, *a, **kw)", "def RegisterService():\n hooks.RegisterHook(SERVICE_NAME, 'file-exists', hook_class=HookForExists)\n hooks.RegisterHook(SERVICE_NAME, 'file-write',\n hook_class=HookForWriteAndTouch)\n hooks.RegisterHook(SERVICE_NAME, 'file-touch',\n hook_class=HookForWriteAndTouch)\n hooks.RegisterHook(SERVICE_NAME, 'file-get', hook_class=HookForGet)\n hooks.RegisterHook(SERVICE_NAME, 'list-files', hook_class=HookForListFiles)\n hooks.RegisterHook(SERVICE_NAME, 'list-dir', hook_class=HookForListDir)", "def registerSLPlugin(self, tag, handler_fn):\n self.sl_plugins[tag] = handler_fn", "def register(self):\n\n @asyncio.coroutine\n def on_prepare(xact_info, action, ks_path, msg):\n \"\"\" prepare callback from dts \"\"\"\n self._log.debug(\n \"Got vlr on_prepare callback (xact_info: %s, action: %s): %s\",\n xact_info, action, msg\n )\n raise NotImplementedError(\n \"%s action on VirtualLinkRecord not supported\",\n action)\n\n self._log.debug(\"Registering for VLR using xpath: %s\",\n VlrPublisherDtsHandler.XPATH,)\n\n hdl = rift.tasklets.DTS.RegistrationHandler()\n with self._dts.group_create() as group:\n self._regh = group.register(xpath=VlrPublisherDtsHandler.XPATH,\n handler=hdl,\n flags=(rwdts.Flag.PUBLISHER |\n rwdts.Flag.NO_PREP_READ |\n rwdts.Flag.CACHE),)", "def add_pipelines_path(path):\n added = _add_lookup_path(path, _PIPELINES_PATHS)\n if added:\n LOGGER.debug('New pipelines path added: %s', path)", "def set_plugin_path(self, path):\n ckresult(_dll.FMOD_System_SetPluginPath(self._ptr, path))", "def add_hook(**_kwargs):\n hook = import_hook.create_hook(\n transform_source=transform_source,\n transform_ast=transform_ast,\n hook_name=__name__,\n extensions=[\".🐍\"],\n )\n return hook", "def install(self, directories):\n if isinstance(directories, str):\n directories = [directories]\n\n def predicate(path):\n \"\"\"Match any directory or subdirectory of `directories`.\"\"\"\n p = os.path.abspath(path)\n return any(p == d or p.startswith(d + os.path.sep)\n for d in directories)\n\n # Add our custom path hook to the list of system imports.\n path_hook 
= Runtime2to3FileFinder.predicated_path_hook(\n predicate, self.refactoring_tool, self.tag)\n sys.path_hooks.insert(0, path_hook)\n sys.path_importer_cache.clear()", "def predicated_path_hook(cls, predicate, *a, **kw):\n def predicated_path_hook_for_FileFinder(path):\n \"\"\"path hook for FileFinder\"\"\"\n if not os.path.isdir(path):\n raise ImportError(\"only directories are supported\")\n if not predicate(path):\n raise ImportError(\"predicate not satisfied\")\n return cls(path, *a, **kw)\n return predicated_path_hook_for_FileFinder", "def register ():\n dsf_geom_import.register ()\n dsf_morph_import.register ()\n dsf_morph_export.register ()\n dsf_uvset_import.register ()\n dsf_arm_import.register ()\n dsf_pose_import.register ()\n dsf_wm_import.register ()\n dsf_geom_export.register ()\n dsf_prop_export.register ()", "def register_post_import_hook(name, hook):\n # Automatically install the import hook finder if it has not already\n # been installed.\n global _post_import_hooks_init\n\n if not _post_import_hooks_init:\n _post_import_hooks_init = True\n sys.meta_path.insert(0, ImportHookFinder())\n\n hooks = _post_import_hooks.get(name, [])\n\n if hook in hooks:\n log.debug('hook \"%s\" already exists on module \"%s\"', hook, name)\n return\n\n module = sys.modules.get(name, None)\n\n # If the module has been imported already fire the hook and log a debug msg.\n if module:\n log.debug('module \"%s\" already imported, firing hook', name)\n hook(module)\n\n hooks.append(hook)\n _post_import_hooks[name] = hooks", "def create_hookdir_refs ( self,\n hook_dir, overwrite=False, compare_fspath=True\n ):\n for event, user_script in self.iter_scripts():\n if overwrite or not user_script.has_hookscript():\n try:\n hook = hook_dir.get_script ( user_script.name )\n except KeyError:\n pass\n else:\n if hook is not None and (\n not compare_fspath or user_script.fspath == hook.fspath\n ):\n user_script.set_hookscript ( hook )", "def test_sys_metapath_hooks_specs():\n \n finder = Finder()\n assert type(finder.loader) is Loader\n # assert type(finder) in sys.meta_path\n print('sys.meta_path:')\n pprint(sys.meta_path)\n\n spec0 = finder.find_spec('clu.app.FindMe', [])\n assert spec0.name == 'clu.app.FindMe'\n\n module0 = finder.loader.create_module(spec0)\n assert isinstance(module0, (FindMe, ModuleBase))\n\n module1 = finder.loader.create_module(spec0)\n assert isinstance(module1, (FindMe, ModuleBase))\n\n spec1 = finder.find_spec('clu.app.FindMe', [])\n assert spec1.name == 'clu.app.FindMe'\n \n registered = Registry.for_qualname('clu.app.FindMe')\n assert nameof(registered) == nameof(FindMe)", "def mock_path_hook(*entries, importer):\n def hook(entry):\n if entry not in entries:\n raise ImportError\n return importer\n return hook", "def setup_hooks(self):\n pass", "def load_zope(finder, module):\n module.ExtendPath()", "def setup_script_registry():\n for module in pkgutil.iter_modules(parlai.scripts.__path__, 'parlai.scripts.'):\n importlib.import_module(module.name)\n try:\n import parlai_fb.scripts\n\n for module in pkgutil.iter_modules(\n parlai_fb.scripts.__path__, 'parlai_fb.scripts.'\n ):\n importlib.import_module(module.name)\n except ImportError:\n pass\n try:\n import parlai_internal.scripts\n\n for module in pkgutil.iter_modules(\n parlai_internal.scripts.__path__, 'parlai_internal.scripts.'\n ):\n importlib.import_module(module.name)\n except ImportError:\n pass", "def register_binary_search_path(search_path):\n # pylint: disable=protected-access\n 
moved_in_v7_warning(\"register_binary_search_path is now a View method\")\n SpynnakerDataView.register_binary_search_path(search_path)", "def register_environment_functions(self):\n self.environment.globals['path_join'] = path_join\n self.environment.globals['path_normalize'] = path_normalize", "def register(self):\n\n @asyncio.coroutine\n def on_prepare(xact_info, action, ks_path, msg):\n \"\"\" prepare callback from dts \"\"\"\n self._log.debug(\n \"Got vnfr on_prepare callback (xact_info: %s, action: %s): %s\",\n xact_info, action, msg\n )\n raise NotImplementedError(\n \"%s action on VirtualNetworkFunctionRecord not supported\",\n action)\n\n self._log.debug(\"Registering for VNFR using xpath: %s\",\n VnfrPublisherDtsHandler.XPATH,)\n\n hdl = rift.tasklets.DTS.RegistrationHandler()\n with self._dts.group_create() as group:\n self._regh = group.register(xpath=VnfrPublisherDtsHandler.XPATH,\n handler=hdl,\n flags=(rwdts.Flag.PUBLISHER |\n rwdts.Flag.NO_PREP_READ |\n rwdts.Flag.CACHE),)", "def make_hookdir_refs ( self, hook_dir, overwrite=False ):\n # try exact fs path matches first, then use name-based ones\n self.create_hookdir_refs (\n hook_dir, overwrite=overwrite, compare_fspath=True\n )\n self.create_hookdir_refs (\n hook_dir, overwrite=overwrite, compare_fspath=False\n )", "def _install_ff_locally(self, path, ff_exe):\n\n if sys.platform.startswith('win'):\n # Windows: copy the whole tuntime\n copy_xul_runtime(op.dirname(ff_exe), path)\n else:\n # OSX / Linux: create a symlink to xul runtime exe\n os.mkdir(path)\n stub_exe = op.join(path, 'xulrunner')\n os.symlink(ff_exe, stub_exe)\n return stub_exe", "def pytest_addhooks(pluginmanager):\n from . import newhooks\n\n pluginmanager.add_hookspecs(newhooks)", "def configure_zpt_renderer(search_path=()):\n\n default_paths = deform.form.Form.default_renderer.loader.search_path\n paths = []\n for path in search_path:\n pkg, resource_name = path.split(':')\n paths.append(resource_filename(pkg, resource_name))\n\n deform.form.Form.default_renderer = deform.ZPTRendererFactory(tuple(paths) + default_paths)", "def _do_install_hook(self, args):\r\n hook_name = args[1]\r\n fct_name = args[2]\r\n hooks.install_hook(hook_name, self._hooks_fct[fct_name])", "def add_search_path(*path_tokens):\n full_path = os.path.join(*path_tokens)\n if full_path not in sys.path:\n sys.path.insert(0, os.path.abspath(full_path))", "def add_module(self, *args, **kwargs):\n# if 'path' in kwargs:\n# path = kwargs['path']\n# else:\n# path = os.getcwd()\n#\n# if len(args) > 0:\n# module = args[0]\n# elif 'module' in kwargs:\n# module = kwargs['module']\n#\n# if 'path' not in kwargs:\n# path = os.getcwd()\n# kwargs['path'] = path\n\n if 'module' not in kwargs:\n if len(args) > 0:\n module = args[0]\n kwargs['module'] = module\n\n# if 'module' in kwargs:\n if len(kwargs) > 0:\n self._data.add_detector(self._name, **kwargs)" ]
[ "0.6036175", "0.5240254", "0.5236929", "0.51234156", "0.5085861", "0.5058323", "0.5038346", "0.50234586", "0.50069255", "0.4978417", "0.49670815", "0.49526778", "0.48803136", "0.48607883", "0.4822473", "0.4798947", "0.47953275", "0.4795149", "0.47560418", "0.4753519", "0.47291508", "0.47138333", "0.46816048", "0.4670775", "0.46539602", "0.46458963", "0.46420658", "0.46284142", "0.46282113", "0.4628018" ]
0.8060369
0
Launch an application that reads from a webcam and estimates hand pose in real time. The captured hand must be the right hand, but will be flipped internally and rendered.
def live_application(capture): ############ output visualization ############ view_mat = axangle2mat([1, 0, 0], np.pi) # align different coordinate systems window_size = 1080 hand_mesh = HandMesh(config.HAND_MESH_MODEL_PATH) mesh = o3d.geometry.TriangleMesh() mesh.triangles = o3d.utility.Vector3iVector(hand_mesh.faces) mesh.vertices = \ o3d.utility.Vector3dVector(np.matmul(view_mat, hand_mesh.verts.T).T * 1000) mesh.compute_vertex_normals() viewer = o3d.visualization.Visualizer() viewer.create_window( width=window_size + 1, height=window_size + 1, window_name='Minimal Hand - output' ) viewer.add_geometry(mesh) view_control = viewer.get_view_control() cam_params = view_control.convert_to_pinhole_camera_parameters() extrinsic = cam_params.extrinsic.copy() extrinsic[0:3, 3] = 0 cam_params.extrinsic = extrinsic cam_params.intrinsic.set_intrinsics( window_size + 1, window_size + 1, config.CAM_FX, config.CAM_FY, window_size // 2, window_size // 2 ) view_control.convert_from_pinhole_camera_parameters(cam_params) view_control.set_constant_z_far(1000) render_option = viewer.get_render_option() render_option.load_from_json('./render_option.json') viewer.update_renderer() ############ input visualization ############ pygame.init() display = pygame.display.set_mode((window_size, window_size)) pygame.display.set_caption('Minimal Hand - input') ############ misc ############ mesh_smoother = OneEuroFilter(4.0, 0.0) clock = pygame.time.Clock() model = ModelPipeline() while True: frame_large = capture.read() if frame_large is None: continue if frame_large.shape[0] > frame_large.shape[1]: margin = int((frame_large.shape[0] - frame_large.shape[1]) / 2) frame_large = frame_large[margin:-margin] else: margin = int((frame_large.shape[1] - frame_large.shape[0]) / 2) frame_large = frame_large[:, margin:-margin] frame_large = np.flip(frame_large, axis=1).copy() frame = imresize(frame_large, (128, 128)) xyz, theta_mpii = model.process(frame) fixed_heatmap = (xyz * 1/2 + 1) * 200 handimg = np.ones(shape=(300, 300, 3), dtype='uint8') * 255 plot_hand(fixed_heatmap, None, handimg) cv2.imshow('heatmap', handimg) cv2.waitKey(50) theta_mano = mpii_to_mano(theta_mpii) v = hand_mesh.set_abs_quat(theta_mano) v *= 2 # for better visualization v = v * 1000 + np.array([0, 0, 400]) v = mesh_smoother.process(v) mesh.triangles = o3d.utility.Vector3iVector(hand_mesh.faces) mesh.vertices = o3d.utility.Vector3dVector(np.matmul(view_mat, v.T).T) mesh.paint_uniform_color(config.HAND_COLOR) mesh.compute_triangle_normals() mesh.compute_vertex_normals() print(np.array(mesh.vertices)) # for some version of open3d you may need `viewer.update_geometry(mesh)` viewer.update_geometry() viewer.poll_events() display.blit( pygame.surfarray.make_surface(np.transpose(imresize(frame_large, (window_size, window_size)), (1, 0, 2)) ), (0, 0) ) pygame.display.update() clock.tick(30)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def launch_webcam(self):\n global face_encoding\n\n # Call the image_import.add_user method which launches the camera and\n # returns the face encodings if a new picture is taken\n face_encoding = image_import.add_user()\n\n # Check if a new image was returned from the add_user method\n if len(face_encoding) == 128:\n # Confirm if a new image has been captured\n self.label_face_captured.setText(QtCore.QCoreApplication.translate(\"MainWindow\", \"Image Captured \"))\n self.check_box.show()\n self.check_box.setEnabled(True)\n self.check_box.setChecked(True)\n else:\n # Notify if a new image is not captured\n self.label_face_captured.setText(QtCore.QCoreApplication.translate(\"MainWindow\", \"No Image Captured\"))\n self.check_box.hide()", "def main():\n cv2.namedWindow('video', cv2.WINDOW_AUTOSIZE)\n\n cap = cv2.VideoCapture(sys.argv[1])\n while cap.isOpened():\n ret, frame = cap.read()\n if not ret: # done\n break\n\n cv2.imshow('video', frame)\n\n key = cv2.waitKey(30)\n if key & 0xFF == ord('q'): # quit\n break\n\n cap.release()\n cv2.destroyAllWindows()", "def run(self):\n\n info(\"creating camera\")\n self.camera_controller = CameraController()\n self.camera_controller.camera.resolution = self.photo_resolution\n\n self.screen_resolution = ui.get_screen_resolution()\n self.normalized_screen_resolution = ui.normalize_dimension(self.screen_resolution)\n info(\"screen_resolution: %s\", self.screen_resolution)\n info(\"normalized_screen_resolution: %s\", self.normalized_screen_resolution)\n\n info(\"creating buffer image and canvas\")\n self.buffer_image = Image.new('RGB', self.normalized_screen_resolution)\n self.canvas = ImageDraw.Draw(self.buffer_image)\n debug(\"buffer_image resolution: %s\", self.buffer_image.size)\n\n info(\"creating preview renderer\")\n self.preview_renderer = self.camera_controller.start_preview(\n fullscreen=False,\n window=ui.normalize_dimension((\n 0, 0,\n self.normalized_screen_resolution[0] * 0.75,\n self.normalized_screen_resolution[1]\n )))\n debug(\"preview location: %s\", self.preview_renderer.window)\n\n info(\"creating window renderer\")\n self.window_renderer = self.camera_controller.add_overlay(\n self.buffer_image.tobytes(),\n size=self.buffer_image.size,\n fullscreen=False,\n layer=1,\n window=(\n 0, 0,\n self.normalized_screen_resolution[0],\n self.normalized_screen_resolution[1]\n ))\n debug(\"window location: %s\", self.window_renderer.window)\n\n info(\"setting up UI\")\n self._setup_ui()\n\n info(\"setting up input\")\n self.yes_button = GPIOButton(self.yes_pin)\n self.no_button = GPIOButton(self.no_pin)\n\n info(\"starting app\")\n self._enter_state(STATE_DEFAULT)\n self.render_timer.start()\n ui_context = ui.UIContext(self.canvas, self.window, update_function=self._logic)\n ui_context.main_loop()\n\n info(\"exiting\")", "def main():\n\n\t# Run setup\n\ts = Setup()\n\tcontrolRoom, studio, newscaster = s.beginSetup()\n\n\t# Start cameras\n\tcontrolRoom.startCameras()\n\tprint 'Cameras started'\n\tcontrolRoom.setCameraSize()\n\n\tif len(controlRoom.studio.cameras) > 1:\n\t\tprint 'Everything up and running...'\n\n\t\t# Fetch a camera that best matches the headpose angle\n\t\tcamera = controlRoom.getClosestCamera()\n\t\twhile True:\n\t\t\t# If advance camera selection algo indicates true, fetch camera closest to headpose\n\t\t\tif controlRoom.cameraSelectionADV():\n\t\t\t\tcamera = controlRoom.getClosestCamera()\n\t\t\tprint 'Active camera: ' + str(camera.cameraID)\n\t\t\t\n\t\t\t# Capture frame or in simulation mode, light up 
led\n\t\t\tcamera.capture()\n\n\telif len(controlRoom.studio.cameras) == 1:\n\t\twhile True:\n\t\t\tcontrolRoom.studio.cameras[0].capture()\n\t\t\ttime.sleep(2)\n\telse:\n\t\tprint 'No cameras found! Something seems to be wrong...'\n\n\t# Shutdown all cameras and kill all windows\n\tcontrolRoom.shutdownCameras()", "def showWebcam(model): \n\n cap = cv.VideoCapture(0)\n\n while(True):\n ret, frame = cap.read()\n detectUsingModel(model,frame)\n\n cv.imshow('frame',frame)\n if cv.waitKey(1) & 0xFF == ord('q'):\n break\n\n cap.release()\n cv.destroyAllWindows()", "def cameraOn():\n cap = cv2.VideoCapture(CAM0, cv2.CAP_DSHOW) # use camera to monitor the motor-mirror assemnbly by DirectShow\n while(True):\n # Capture frame-by-frame\n ret, frame = cap.read()\n\n # Display the resulting frame\n cv2.imshow(\" Real-Time Video. Press 'q' to exist.\",frame)\n if cv2.waitKey(8) & 0xFF == ord('q'): #display a frame for 8ms, ~120Hz\n break\n \n cap.release() # release the capture\n cv2.destroyAllWindows()", "def StartWebcam(self):\n if not os.path.exists('static'):\n os.mkdir('static')\n camera = olpc.Camera('static/webcam.png')\n camera.StartWebcam()", "def run(self):\n while True:\n self.ret, self.frame = self.cap.read()\n if self.ret:\n rgbImage = cv2.cvtColor(self.frame, cv2.COLOR_BGR2RGB)\n convertToQtFormat = QImage(rgbImage.data, rgbImage.shape[1], rgbImage.shape[0], QImage.Format_RGB888)\n self.readyFrame = convertToQtFormat.scaled(500, 375, Qt.KeepAspectRatio)\n self.send_camera_view_to_gui.emit(self.readyFrame)", "def main():\n camera = picamera.PiCamera()\n camera.resolution = (RESOLUTIONX, RESOLUTIONY)\n camera.iso = 800\n time.sleep(2)\n while True:\n camera.capture('current-image.jpg')\n adapt_steering(navigation.get_xposition('current-image.jpg'))\n time.sleep(0.4)", "def camera_exec():\n pygame.init()\n locals()\n\n plot_num = 0\n running, Clock, font = camera_connect()\n while running:\n Clock.tick(100)\n\n # read framebuffer\n fb = None\n while (True) :\n try:\n fb = pyopenmv.fb_dump()\n break\n except Exception as e:\n # try and reconnect on failure\n camera_connect()\n\n # signal to UArm that camera has connected\n camera_started.set()\n if fb is not None:\n # create image from RGB888\n image = pygame.image.frombuffer(fb[2].flat[0:], (fb[0], fb[1]), 'RGB')\n screen = pygame.display.set_mode((fb[0], fb[1]), pygame.DOUBLEBUF, 32)\n\n fps = Clock.get_fps()\n # blit stuff\n screen.blit(image, (0, 0))\n screen.blit(font.render(\"FPS %.2f\"%(fps), 1, (255, 0, 0)), (0, 0))\n\n # update display\n pygame.display.flip()\n\n # get output from text buffer\n tx_len = pyopenmv.tx_buf_len()\n\n # object was found by camera if there is outputted text\n if tx_len:\n\n '''\n if UArm has signaled to the camera to identify the object and the camera has not already\n assigned values to the global variables associated with the object's location\n '''\n if camera_event.is_set() and (data_ready.is_set() is False):\n\n # read the most recent data at index 0 from the text buffer\n buff = pyopenmv.tx_buf(tx_len).decode()\n split_buff = str(buff).splitlines()\n if h_angle_key in split_buff[0]:\n\n # Most recent line in buff contains needed information\n global h_angle, v_angle, is_centered\n tok = split_buff[0].split()\n\n # set angles to corresponding values determined by camera\n h_angle, v_angle = float(tok[1]), float(tok[3])\n if tok[5] == \"True\":\n is_centered = True\n else:\n is_centered = False\n # signal that global variables have been set\n data_ready.set()\n\n if plot_ready.is_set():\n 
print(\"success_rate: \", success_history)\n plot_distance(distance_history, plot_num)\n plot_success(success_history, plot_num)\n plot_num += 1\n plot_ready.clear()\n print(\"success rate for \", len(success_history), \" tests: \",\n success_history.count(True) / len(success_history))\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n running = False\n elif event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n running = False\n if event.key == pygame.K_c:\n pygame.image.save(image, \"capture.png\")\n\n pygame.quit()\n pyopenmv.stop_script()", "def camera():\n while True:\n subprocess.check_output(['fswebcam', 'image.jpg'])\n sleep(60)", "def main():\n\n app = QApplication(sys.argv)\n win = TestWindow(TrainingGoalCalibrationTakePhotoScreen())\n win.show()\n sys.exit(app.exec_())", "def start_realsense_camera():\n global realsense_enabled, camera, write_bag\n\n if not realsense_enabled:\n realsense_enabled = True\n\n write_bag_path = None\n if write_bag:\n ###\n filename = datetime.datetime.now().strftime(\"%Y%m%d_%H%M%S\")\n write_bag_path = \"realsense_files/\" + filename + \".bag\"\n\n camera = depth_cam(width=width, height=height, channels=channels,\n enable_rgb=enable_rgb, enable_depth=enable_depth, enable_imu=enable_imu, record_bag=write_bag_path, read_bag=None)\n\n return render_settings_view()", "def start(self):\n while True:\n requests.get(\"http://localhost:8080/clear\") #clearing the screen on the web browser\n speech=\"Welcome to Smart Mirror !!\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % speech) # calling the text to appear on the browser\n self.speech.synthesize_text(\"hello\"+speech) #synthesizing the text into speech\n speech1=\"Say The launch Phrase .\" #asking the user to say the lauch phrase\n self.speech.synthesize_text(speech1) #speaking of the above line,\n if self.vision.recognize_face(): #checking if\n print \"Face Found\"\t\t\t#the person is infront of camera\n if use_launch_phrase:\t\t\t#checking whether to use the launch phrase or not\n recognizer, audio = self.speech.listen_for_audio()\t\t#initializing\n if self.speech.is_call_to_action(recognizer, audio):\t#checking if the audio is recognized\n self.__acknowledge_action()\t\t\t#if it is recognized take action\n self.decide_action()\t\t\t#deciding which action to be taken\n else:\n self.decide_action()\t\t\t#printing the else part", "def openWebcam(self):\n \n # Récupérer l'id de la caméra entré par l'utilisateur\n self.device_id = int(self.device_id_text.text())\n \n # Prendre la main sur la webcam en créant un objet VideoCapture\n self.webcam = cv2.VideoCapture(self.device_id)\n \n # Verbose\n self.printToUser(\"Webcam #\"+str(self.device_id)+\" connected.\")", "def captureDisplay(title=\"Frame\"):\n\tcap = cv2.VideoCapture(0)\n\tret, frame = cap.read()\n\tcv2.namedWindow(title, cv2.WINDOW_NORMAL)\n\tcv2.imshow(title, frame)\n\tcap.release()\n\tcv2.waitKey(0)\n\tcv2.destroyAllWindows()", "def showCamera(self,**kwargs):\n try:\n side = kwargs['side']\n except Exception,e:\n rospy.logerr(\"%s\"%str(e))\n self.mm.neglect()\n return\n self.hideGUI(**{'update':False})\n self.baxter.camera.startCamera(side+\"_hand_camera\")\n self.locator.publish_camera = True", "def display_video_stream(self):\n _, frame = self.capture.read()\n\n if frame is None:\n return\n\n # hand rectangle dimensions\n x0 = self.video_size.height() - int(self.handrect_y + self.handrect_height / 2.)\n x1 = self.video_size.height() - int(self.handrect_y - self.handrect_height / 2.)\n y0 = 
int(self.handrect_x - self.handrect_width / 2.)\n y1 = int(self.handrect_x + self.handrect_width / 2.)\n cv2_p0 = (y0, x0)\n cv2_p1 = (y1, x1)\n self.video_size.width()\n\n frame, count_defects = find_gesture(frame, hand_p0=cv2_p1, hand_p1=cv2_p0,\n invert=self.checkBox_invert.isChecked())\n\n if count_defects == Gestures.ONE.value:\n cv2.putText(frame, \"closed fist\", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n elif count_defects == 2:\n cv2.putText(frame, \"Two fingers\", (5, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, 2)\n\n elif count_defects == 3:\n cv2.putText(frame, \"three fingers\", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n elif count_defects == 4:\n cv2.putText(frame, \"four fingers\", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n else:\n cv2.putText(frame, \"five fingers\", (50, 50), \\\n cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n\n if count_defects != self.current_gesture or count_defects == Gestures.ONE.value:\n # update progress time\n self.gesture_time = time()\n self.progressBar.setValue(0)\n else:\n self.current_gesture = count_defects\n\n dt = time() - self.gesture_time\n\n if dt < self.maxdt:\n # update progress bar\n self.progressBar.setValue(int(100*dt/self.maxdt))\n else:\n # trigger action\n cbox = self.comboboxes[max(min(4, count_defects-2), 0)]\n func_idx = cbox.currentIndex()\n callfunc = cbox.itemData(func_idx)\n callfunc()\n self.labelStatus.setText(\"{}, {}\".format(count_defects, cbox.itemText(func_idx)))\n\n # reset progress time\n self.gesture_time = time()\n\n self.current_gesture = count_defects\n frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n image = QImage(frame, frame.shape[1], frame.shape[0],\n frame.strides[0], QImage.Format_RGB888)\n self.image_label.setPixmap(QPixmap.fromImage(image))", "def run(self):\n\n # capture from web cam\n cap = cv2.VideoCapture(self.gstreamer_pipeline(), cv2.CAP_GSTREAMER)\n while self._run_flag:\n ret, cv_img = cap.read()\n if ret:\n self.change_pixmap_signal.emit(cv_img)\n # shut down capture system\n cap.release()", "def main():\n parser = ArgumentParser()\n parser.add_argument('pose_config', help='Config file for pose')\n parser.add_argument('pose_checkpoint', help='Checkpoint file for pose')\n parser.add_argument('--video-path', type=str, help='Video path')\n parser.add_argument('--show', type=str2bool, nargs='?',\n default=False, help=\"show results.\")\n parser.add_argument('--device', default='cpu',\n help='Device used for inference')\n parser.add_argument('--box-thr', type=float, default=0.1,\n help='Bounding box score threshold')\n parser.add_argument('--kpt-thr', type=float, default=0.1,\n help='Keypoint score threshold')\n parser.add_argument('--folder_box', type=str, default='')\n parser.add_argument('--save_pixels', type=str2bool, nargs='?',\n const=True, default=False,\n help='saveposes as pixels or ratio of im')\n parser.add_argument('--skip_rate', type=int, default=1)\n parser.add_argument('--flip', type=str2bool, default=False)\n parser.add_argument('--save_vid', type=str2bool, default=False)\n\n args = parser.parse_args()\n\n start(args)", "def main():\n\n # Retrieve singleton reference to system object\n system = PySpin.System.GetInstance()\n\n # Retrieve list of cameras from the system\n cam_list = system.GetCameras()\n\n num_cameras = cam_list.GetSize()\n\n print(\"Number of cameras detected:\", num_cameras)\n # Finish if there are no cameras\n if num_cameras == 0:\n # Clear camera list before releasing system\n cam_list.Clear()\n\n # Release system\n system.ReleaseInstance()\n\n print(\"Not enough cameras!\")\n \n 
return False\n\n cam = cam_list.GetByIndex(0)\n run_single_camera(cam)\n\n\n # Release reference to camera\n del cam\n\n # Clear camera list before releasing system\n cam_list.Clear()\n\n # Release instance\n system.ReleaseInstance()", "def run():\n\tglobal kinect \n\tkinect.depth_frame_ready += DEPTH\n\tkinect.depth_stream.open( nui.ImageStreamType.Depth, 2,\n\t\t\t\t\t\t\t nui.ImageResolution.Resolution320x240,\n\t\t\t\t\t\t\t nui.ImageType.Depth )\n\tcv2.namedWindow( 'VideoDEPTH', cv2.WINDOW_AUTOSIZE )", "def run(self):\n\n # initialize the video stream and allow the camera\n # sensor to warmup\n print(\"[Recorder] warming up camera...\")\n time.sleep(2.0)\n\n self._fourcc = cv2.VideoWriter_fourcc('M','J','P','G')\n #ret, frame = self._camera.read()\n #(self._height, self._width) = frame.shape[:2]\n print(\"[Recorder] can start\")\n self._is_ready = True\n ret = False\n frame = None\n stream = ioEx.BytesIO()\n \n while (not self._stop.is_set()):\n if args.use_usb:\n ret, frame = self._camera.read()\n else:\n ret = True\n self._camera.capture(stream, format='jpeg', use_video_port=True)\n # Construct a numpy array from the stream\n data = np.fromstring(stream.getvalue(), dtype=np.uint8)\n # \"Decode\" the image from the array, preserving colour\n frame = cv2.imdecode(data, 1)\n\n if ret==True:\n self._frame_lock.acquire()\n self._frame = frame\n self._frame_lock.release()\n\n # write the flipped frame\n self._writer_lock.acquire()\n if not (self._writer is None):\n self._writer.write(frame)\n self._writer_lock.release()\n time.sleep(0.001)\n\n if not (self._writer is None):\n self._writer.release()\n print('[Recorder] end thread')", "def run(self):\n try:\n # import here to prevent Panda3D from loading in the host process\n from .viewer_app import ViewerApp\n\n app = ViewerApp(*self._args, **self._kwargs)\n self._proc_conn.send(None)\n\n def _execute(task):\n for _ in range(100):\n if not self._proc_conn.poll(0.001):\n break\n name, args, kwargs = self._proc_conn.recv()\n if name == 'step':\n self._proc_conn.send(None)\n break # let the manager to execute other tasks\n try:\n reply = getattr(app, name)(*args, **kwargs)\n self._proc_conn.send(reply)\n except Exception as error:\n self._proc_conn.send(error)\n return task.cont\n\n app.task_mgr.add(_execute, \"Communication task\", -50)\n app.run()\n except Exception as error:\n self._proc_conn.send(error)\n else:\n self._proc_conn.send(ViewerClosedError(\n 'User closed the main window'))\n # read the rest to prevent the host process from being blocked\n if self._proc_conn.poll(0.05):\n self._proc_conn.recv()", "def start_capture(self):\r\n super(PostProcess, self)._start()\r\n from pi3d.Display import Display\r\n xx = Display.INSTANCE.width / 2.0 * (1.0 - self.scale)\r\n yy = Display.INSTANCE.height / 2.0 * (1.0 - self.scale)\r\n ww = Display.INSTANCE.width * self.scale\r\n hh = Display.INSTANCE.height * self.scale\r\n opengles.glEnable(GL_SCISSOR_TEST)\r\n opengles.glScissor(ctypes.c_int(int(xx)), ctypes.c_int(int(yy)),\r\n ctypes.c_int(int(ww)), ctypes.c_int(int(hh)))", "def display_full_view(self, name=\"Hand Detector\", exit_key=\"q\", connect_lines=True, display_names=False, video_capture=0):\n capture = cv.VideoCapture(video_capture)\n while True:\n success, view = capture.read()\n self.set_view(view)\n self._display_hand(connect_lines=connect_lines,\n display_names=display_names)\n cv.imshow(name, view)\n if cv.waitKey(20) & 0xFF == ord('q'):\n break\n capture.release()\n cv.destroyWindow(name)", "def show_webcam_and_run(model, 
emoticons, window_size=None, window_name='webcam', update_time=10):\n cv2.namedWindow(window_name, WINDOW_NORMAL)\n if window_size:\n width, height = window_size\n cv2.resizeWindow(window_name, width, height)\n\n # 选择摄像头,0为本地\n vc = cv2.VideoCapture(0) # http://192.168.0.2:4747/mjpegfeed para camara android remota por medio de Droidcam\n\n # 摄像头分辨率,默认为当前使用摄像头的最高分辨率\n\n # vc.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)\n # vc.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)\n\n if vc.isOpened():\n read_value, webcam_image = vc.read()\n else:\n print(\"[ERROR] No se enontro camara.\")\n return\n while read_value:\n for normalized_face, (x, y, w, h) in find_faces(webcam_image):\n prediction = network.predict(normalized_face) # hace la prediccion\n prediction = prediction[0] # guarda el numero de la emocion para diujar el emoji\n # carga el emoji para dibujarlo\n image_to_draw = emoticons[prediction.tolist().index(max(prediction))]\n # dibuja el emoji\n draw_with_alpha(webcam_image, image_to_draw, (x , y - 100, w, h)) # image_to_draw, , webcam_image,\n cv2.setWindowProperty(window_name, cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)\n cv2.imshow(window_name, webcam_image)\n read_value, webcam_image = vc.read()\n key = cv2.waitKey(update_time)\n if key == 27: # salir con esc\n break\n cv2.destroyWindow(window_name)", "def main():\n log.info(\"Waking up!\")\n face_cc, video_capture, voice = initialize()\n\n running = True # Is the program running?\n\n seen = None\n counter = 0\n while running:\n # TODO Feel\n # Watch\n frame, faces = get_frame(face_cc, video_capture, voice)\n counter += 1\n log.debug(repr(faces))\n if seen is None:\n seen = faces\n cv2.imshow('Video', frame)\n # Wait for input, TODO make it more generic loose from cv2\n read_keyboard_input = cv2.waitKey(1) & 0xFF\n if debug:\n log.debug(\"KEY: %s\" % read_keyboard_input)\n running = listen_signal(read_keyboard_input, voice)\n # Do something with faces detected in a given frame\n if counter % 9 == 0: ## Skip some frames\n log.debug(len(faces))\n if len(faces) > 0 and len(seen) > 0:\n log.debug(\"vustos\")\n log.debug(seen)\n log.debug(\"nuevos\")\n log.debug(faces)\n idx = 0\n for f in faces:\n log.debug(\"face %i\" % idx)\n log.debug(f)\n try:\n if not len(seen) < idx or not len(faces) < idx:\n if (seen[idx] & faces[idx]).any():\n sleep(1)\n speak((\"Hello person %i\" % idx), \"greet-person\", voice)\n except IndexError as err:\n log.error(err)\n finally:\n voice.get_engine().iterate()\n idx += 1\n \n \n exit(0 ,video_capture)", "def camstart():\n\n\trespond = send_command('camstart')", "def capture_camera(mirror=True, size=None):\n # カメラをキャプチャする\n cap = cv2.VideoCapture(0) # 0はカメラのデバイス番号\n #HAAR分類器の顔検出用の特徴量\n cascade_path = \"haarcascade_frontalface_alt.xml\"\n color = (255, 255, 255) #白\n #カスケード分類器の特徴量を取得する\n cascade = cv2.CascadeClassifier(cascade_path)\n\n while True:\n count = 0 #参照フレームのカウント\n # retは画像を取得成功フラグ\n ret, frame = cap.read()\n\n # 鏡のように映るか否か\n if mirror is True:\n frame = frame[:,::-1]\n\n # フレームをリサイズ\n # sizeは例えば(800, 600)\n if size is not None and len(size) == 2:\n frame = cv2.resize(frame, size)\n\n k = cv2.waitKey(1) # 1msec待つ\n\n if k == 13: # Enterキーで保存\n cv2.imwrite(\"test.png\", frame)\n\n if k == 27: # ESCキーで終了\n break\n\n\n if count == 10 or count == 0: # 参照フレーム軽減\n #グレースケール変換\n image_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n #物体認識(顔認識)の実行\n facerect = cascade.detectMultiScale(image_gray, scaleFactor=1.1, minNeighbors=1, minSize=(1, 1))\n count = 1\n else:\n count = count + 1\n #rect = (50,50,50,50)\n image = 
cv2.imread('lena.jpeg')\n #cv2.rectangle(image), tuple([50,50]), tuple([50,50]), color, thickness=2)\n\n if len(facerect) > 0:\n #if True:\n #検出した顔を囲む矩形の作成\n print (\"face rectangle\")\n print (facerect)\n for rect in facerect:\n cv2.rectangle(image, tuple(rect[0:2]),tuple(rect[0:2]+rect[2:4]), color, thickness=2)\n print('check')\n\n # フレームを表示する\n cv2.imshow('camera capture', frame)\n\n # キャプチャを解放する\n cap.release()\n cv2.destroyAllWindows()" ]
[ "0.63754654", "0.6237571", "0.6205102", "0.6102088", "0.6093408", "0.60862", "0.60601264", "0.60538155", "0.60299253", "0.60107696", "0.5936067", "0.5913625", "0.5909038", "0.5898597", "0.5890865", "0.58582836", "0.57765263", "0.5755932", "0.57481176", "0.5738701", "0.56991893", "0.56725967", "0.56661713", "0.5662055", "0.56583935", "0.565461", "0.5637662", "0.56241417", "0.5617941", "0.5579945" ]
0.7060998
0
Splits a nengo Connection into separate exc and inh Connections.
def split_exc_inh(conn, net, exc_synapse, inh_synapse): net.connections.remove(conn) solver_set = SolverSet(conn.solver, limit=2) with net: conn_exc = nengo.Connection( conn.pre, conn.post, solver=PositiveOnly(solver_set), synapse=exc_synapse, function=conn.function, eval_points=conn.eval_points, scale_eval_points=conn.scale_eval_points, transform=conn.transform, ) conn_inh = nengo.Connection( conn.pre, conn.post, solver=NegativeOnly(solver_set), synapse=inh_synapse, function=conn.function, eval_points=conn.eval_points, scale_eval_points=conn.scale_eval_points, transform=conn.transform, ) return conn_exc, conn_inh
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_special_connections(self):\n\t\tfor connection in self._infoSpecialConnections:\n\t\t\t# List of source cells ids\n\t\t\tsourcesId = self.cellsId[connection[0]][connection[1]]\n\t\t\t# gather the sources all together\n\t\t\tsourcesId = comm.gather(sourcesId,root=0)\n\t\t\tif rank==0: sourcesId = sum(sourcesId,[])\n\t\t\tsourcesId = comm.bcast(sourcesId,root=0)\n\t\t\t# List of taget cells ids\n\t\t\ttargetsId = self.cellsId[connection[2]][connection[3]]\n\t\t\t# Ratio of connection\n\t\t\tconRatio = connection[4]\n\t\t\t# Number of connections\n\t\t\tconNum = int(connection[5])\n\t\t\t# Weight of connections\n\t\t\tconWeight = float(connection[6])\n\t\t\t# Type of synapse\n\t\t\tsynType = connection[7]\n\t\t\t# connect sources to targets\n\t\t\tself._connect(sourcesId,targetsId,conRatio,conNum,conWeight,synType)", "def generate_connection_e(self,N_e):\n raise NotImplementedError", "def breakConnections(self):\n for connections in pm.listConnections(self.data['shapeNode'], plugs=True, connections=True):\n # if connections[-1].nodeType() in ['shadingEngine', 'displacementShader']:\n if cmds.getClassification(connections[-1].nodeType(), satisfies=\"shader\"):\n pm.disconnectAttr(str(connections[-1]), str(connections[0]))\n self.logger.info(\"Break Connection : %s > %s\" % (str(connections[-1]), str(connections[0])))", "def generate_connection_i(self,N_e):\n raise NotImplementedError", "def add_new_connections(session, cobj, conn_list, at_date):\n start_at = int(at_date.gps)\n data = []\n\n for conn in conn_list:\n cobj.connection(\n upstream_part=conn[0],\n up_part_rev=conn[1],\n downstream_part=conn[3],\n down_part_rev=conn[4],\n upstream_output_port=conn[2],\n downstream_input_port=conn[5],\n start_gpstime=start_at,\n stop_gpstime=None,\n )\n print(\"Starting connection {} at {}\".format(cobj, str(at_date)))\n data.append(\n [\n cobj.upstream_part,\n cobj.up_part_rev,\n cobj.downstream_part,\n cobj.down_part_rev,\n cobj.upstream_output_port,\n cobj.downstream_input_port,\n cobj.start_gpstime,\n \"upstream_part\",\n cobj.upstream_part,\n ]\n )\n data.append(\n [\n cobj.upstream_part,\n cobj.up_part_rev,\n cobj.downstream_part,\n cobj.down_part_rev,\n cobj.upstream_output_port,\n cobj.downstream_input_port,\n cobj.start_gpstime,\n \"up_part_rev\",\n cobj.up_part_rev,\n ]\n )\n data.append(\n [\n cobj.upstream_part,\n cobj.up_part_rev,\n cobj.downstream_part,\n cobj.down_part_rev,\n cobj.upstream_output_port,\n cobj.downstream_input_port,\n cobj.start_gpstime,\n \"downstream_part\",\n cobj.downstream_part,\n ]\n )\n data.append(\n [\n cobj.upstream_part,\n cobj.up_part_rev,\n cobj.downstream_part,\n cobj.down_part_rev,\n cobj.upstream_output_port,\n cobj.downstream_input_port,\n cobj.start_gpstime,\n \"down_part_rev\",\n cobj.down_part_rev,\n ]\n )\n data.append(\n [\n cobj.upstream_part,\n cobj.up_part_rev,\n cobj.downstream_part,\n cobj.down_part_rev,\n cobj.upstream_output_port,\n cobj.downstream_input_port,\n cobj.start_gpstime,\n \"upstream_output_port\",\n cobj.upstream_output_port,\n ]\n )\n data.append(\n [\n cobj.upstream_part,\n cobj.up_part_rev,\n cobj.downstream_part,\n cobj.down_part_rev,\n cobj.upstream_output_port,\n cobj.downstream_input_port,\n cobj.start_gpstime,\n \"downstream_input_port\",\n cobj.downstream_input_port,\n ]\n )\n data.append(\n [\n cobj.upstream_part,\n cobj.up_part_rev,\n cobj.downstream_part,\n cobj.down_part_rev,\n cobj.upstream_output_port,\n cobj.downstream_input_port,\n cobj.start_gpstime,\n \"start_gpstime\",\n cobj.start_gpstime,\n ]\n 
)\n with mc.MCSessionWrapper(session=session) as session:\n update_connection(session, data, True)", "def split_network(self):\n disconnect_nodes(self.nodes[1], 2)\n disconnect_nodes(self.nodes[2], 1)\n self.sync_all([self.nodes[:2], self.nodes[2:]])", "def __init__(self, connections):\n self._connections = connections.split()", "def _out_connections(self, g, tick):\n # outputs could be connected to many different input ports - this is not yet covered\n out_connections=[]\n output_map = {}\n # get the out connections of the given task\n for source,dest in g.get_out_connections(tick):\n if source.port not in output_map.keys():\n output_map[source.port]=[]\n output_map[source.port].append(dest)\n for source,dest in self.body_graph.get_in_connections(graph.FINAL_TICK):\n out_source=graph.Endpoint(source.tick << tick, source.port)\n portname=dest.port\n for out_dest in output_map[portname]:\n out_connections.append((out_source, out_dest))\n return out_connections", "def extract_conn_tags(connection):\n try:\n host, port = connection.host.split(\":\")\n return {\n net.TARGET_HOST: host,\n net.TARGET_PORT: port,\n kombux.VHOST: connection.virtual_host,\n }\n except AttributeError:\n # Unlikely that we don't have .host or .virtual_host but let's not die over it\n return {}", "def _create_common_connections(self):\n\t\tfor muscle,muscAfferentDelay in self._infoMuscles:\n\t\t\tfor connection in self._infoCommonMuscleConnections:\n\t\t\t\t# List of source cells ids\n\t\t\t\tsourcesId = self.cellsId[muscle][connection[0]]\n\t\t\t\t# gather the sources all together\n\t\t\t\tsourcesId = comm.gather(sourcesId,root=0)\n\t\t\t\tif rank==0: sourcesId = sum(sourcesId,[])\n\t\t\t\tsourcesId = comm.bcast(sourcesId,root=0)\n\t\t\t\t# List of taget cells ids\n\t\t\t\ttargetsId = self.cellsId[muscle][connection[1]]\n\t\t\t\t# Ratio of connection\n\t\t\t\tconRatio = connection[2]\n\t\t\t\t# Number of connections\n\t\t\t\tconNum = int(connection[3])\n\t\t\t\t# Weight of connections\n\t\t\t\tconWeight = float(connection[4])\n\t\t\t\t# Type of synapse\n\t\t\t\tsynType = connection[5]\n\t\t\t\t# connect sources to targets\n\t\t\t\tself._connect(sourcesId,targetsId,conRatio,conNum,conWeight,synType)", "def connexify(self, estimator, nb_connect=5, verbose=False):\n connex_groups_id = list(self.graph.connex_groups)\n connex_pairs = permutations(connex_groups_id, 2)\n new_edges = []\n for conidx1, conidx2 in connex_pairs:\n for _ in range(nb_connect):\n node_idx1 = random.choice(self.graph.connex_groups[conidx1])\n node_idx2 = random.choice(self.graph.connex_groups[conidx2])\n state1 = self.graph.nodes[node_idx1]\n state2 = self.graph.nodes[node_idx2]\n success, X_opt, U_opt, V_opt = self.opt_trajectories(\n (state1, state2), estimator,\n verbose=verbose)\n if success:\n new_edges.append(((node_idx1, node_idx2),\n X_opt, U_opt, V_opt))\n\n for edge in new_edges:\n self.graph.add_edge(*edge)", "def expand(self):\n try:\n self.create_connection()\n except Exception as excpt:\n print(excpt)", "def connection_handler(self):\n\t\tyield", "def get_incoming_connections(self, comp):\n in_connections = []\n for comp_id, connections in self.connections.items():\n for connection in connections:\n source, name = connection\n if source == comp.data:\n in_connections.append(connection)\n return in_connections", "def __init__(self,connection):\n self.onedir = connection\n super(myEventHandler,self).__init__()", "def _connect_ping_listener(connection, branch):\n if branch:\n return\n\n save_should_close_with_result = 
connection.should_close_with_result\n connection.should_close_with_result = False\n try:\n connection.scalar(select([1]))\n except Exception as ex:\n connection.scalar(select([1]))\n finally:\n connection.should_close_with_result = save_should_close_with_result", "def handle(self):\n assert self.prepared, \"You have to call prepare before handle\"\n rset, wset, xset = self._select()\n for readable in rset:\n if readable == self._read.fileno():\n # don't care i just need to clean readable flag\n self._read.recv(1024)\n elif readable in self.socket.handles:\n client_socket = self.socket.accept()\n connection = Connection(client_socket, self)\n self.clients[client_socket.fileno()] = connection\n else:\n connection = self.clients[readable]\n connection.read()\n if connection.status == WAIT_PROCESS:\n itransport = TTransport.TMemoryBuffer(connection.message)\n\n # Header protocol needs oprot == iprot. This implies the\n # input memory buffer is reused for output too.\n if isinstance(self.in_protocol, THeaderProtocolFactory):\n omembuf = itransport\n iprot = self.in_protocol.getProtocol(itransport)\n oprot = iprot\n else:\n # Otherwise, assume we need a TFramedTransport.\n omembuf = TTransport.TMemoryBuffer()\n itransport = TTransport.TFramedTransport(itransport)\n otransport = TTransport.TFramedTransport(omembuf)\n iprot = self.in_protocol.getProtocol(itransport)\n oprot = self.out_protocol.getProtocol(otransport)\n\n if self.max_queue_size == 0 or \\\n self.tasks.qsize() <= self.max_queue_size:\n self.tasks.put([self.processor, iprot, oprot,\n omembuf, connection])\n else:\n logging.error(\n \"Queue max size of %d exceeded. Request rejected.\",\n self.max_queue_size)\n for writeable in wset:\n self.clients[writeable].write()\n for oob in xset:\n if oob in self.clients:\n connection = self.clients[oob]\n connection.close()", "def _handle_connection(self, conn):\n conn.serve_all()", "def simple_connect(hostport, delimiter = \"\\r\\n\"):\n #pylint: disable=W0404\n #reason: bug in pylint http://www.logilab.org/ticket/60828\n import multiprocessing\n queues = multiprocessing.Queue(100), multiprocessing.Queue(100)\n \n net_proc = multiprocessing.Process(\n target=named_runner(_run), \n name=\"Net\", \n args=(hostport, queues, delimiter))\n net_proc.start()\n return queues", "def connection_route_from_hosts(self, ingr, egr):\n assert(ingr is not None)\n assert(egr is not None)\n LOG.info(\"Try to connection-route %s -> %s\", ingr, egr)\n\n call_id = CallID(ingr)\n try:\n cep_src = ConnectionEP(ingr)\n cep_dst = ConnectionEP(egr)\n lsp = LspParams()\n\n (wero, pero) = self.routing.connectionRoute(cep_src.ident,\n cep_dst.ident,\n call_id.ident,\n lsp.ident,\n [])\n # in any case flush the call\n self.routing.callFlush(call_id.ident)\n\n return (wero, pero)\n\n except PCERA.CannotFetchConnEndPoint, exe:\n LOG.error(\"CannotFetchConnEndPoint exception: %s\", str(exe))\n except PCERA.ConnectionParamsMismatch, exe:\n LOG.error(\"ConnectionParamsMismatch exception: %s\", str(exe))\n except PCERA.ConnectionEroMismatch, exe:\n LOG.error(\"ConnectionEroMismatch exception: %s\", str(exe))\n except PCERA.ConnectionEroMismatch, exe:\n LOG.error(\"ConnectionEroMismatch exception: %s\", str(exe))\n except PCERA.NoRoute, exe:\n LOG.error(\"NoRoute exception: %s\", str(exe))\n except PCERA.CannotFetchCall, exe:\n LOG.error(\"CannotFetchCall exception: %s\", str(exe))\n except PCERA.InternalProblems, exe:\n LOG.error(\"InternalProblems exception: %s\", str(exe))\n except Exception, exe:\n LOG.error(\"Generic 
exception: %s\", str(exe))\n\n return (None, None)", "def _identify_connection(self):\n pass #nothing to identify...\n #raise NotImplementedError(\"Implement!\")", "def ouverture_connection():\n #driver://username:password@host:port/database\n pg_db = create_engine('postgresql://alain:nostromos@localhost:5432/gites_wallons',\n convert_unicode=True,\n encoding='utf-8')\n connection = pg_db.connect()\n hebergements = connection.execute(\" \\\n select \\\n hebergement.heb_pk, \\\n hebergement.heb_adresse, \\\n hebergement.heb_localite, \\\n hebergement.heb_cgt_cap_min, \\\n hebergement.heb_cgt_cap_max, \\\n hebergement.heb_cgt_nbre_chmbre, \\\n link_hebergement_epis.heb_nombre_epis, \\\n hebergement.heb_lit_1p, \\\n hebergement.heb_lit_2p, \\\n hebergement.heb_lit_sup, \\\n hebergement.heb_lit_enf, \\\n type_heb.type_heb_nom, \\\n hebergement.heb_coordonnee, \\\n proprio.pro_prenom1, \\\n proprio.pro_prenom2, \\\n proprio.pro_nom1, \\\n proprio.pro_nom2, \\\n hebergement.heb_nom, \\\n hebergement.heb_gid_activite_nature, \\\n hebergement.heb_gid_theme_equestre, \\\n hebergement.heb_gid_peche, \\\n hebergement.heb_gid_panda, \\\n hebergement.heb_gid_patrimoine, \\\n hebergement.heb_gid_antiallergique, \\\n hebergement.heb_gid_access_tous, \\\n hebergement.heb_gid_bebe_tendresse, \\\n hebergement.heb_gid_beau_jardin, \\\n hebergement.heb_gid_eco_gite, \\\n proprio.pro_tel_priv, \\\n proprio.pro_gsm1, \\\n commune.com_nom, \\\n commune.com_cp, \\\n proprio.pro_email, \\\n hebergement.heb_tarif_we_bs, \\\n hebergement.heb_tarif_we_ms, \\\n hebergement.heb_tarif_we_hs, \\\n hebergement.heb_tarif_sem_bs, \\\n hebergement.heb_tarif_sem_ms, \\\n hebergement.heb_tarif_sem_hs, \\\n hebergement.heb_fumeur, \\\n hebergement.heb_animal \\\n from \\\n hebergement left outer join link_hebergement_epis on link_hebergement_epis.heb_pk = hebergement.heb_pk, \\\n commune, \\\n type_heb, \\\n proprio \\\n where \\\n hebergement.heb_typeheb_fk in (1,2,3,4,7,10) \\\n and \\\n commune.com_pk=hebergement.heb_com_fk \\\n and \\\n type_heb.type_heb_pk=hebergement.heb_typeheb_fk \\\n and \\\n proprio.pro_pk=hebergement.heb_pro_fk \\\n and \\\n proprio.pro_etat=True \\\n and \\\n hebergement.heb_site_public = '1' \\\n order by \\\n hebergement.heb_localite, \\\n proprio.pro_nom1, \\\n hebergement.heb_nom\")\n return hebergements", "def get_connection_genes(key, config):\n gene1 = ConnectionGene(key, config)\n gene1.enabled = True\n gene1.weight = 0\n gene2 = ConnectionGene(key, config)\n gene2.enabled = False\n gene2.weight = 1\n return gene1, gene2", "def make_connection(self, pre_pop, post_pop):\n #if pre_pop == None and post_pop == None:\n # pre_pop = self.S.filter_units(pre_pop_tags)\n # post_pop = self.S.filter_units(post_pop_tags)\n # iterate through connections\n for pre_tags, post_tags, pre_portID, post_portID in self.connections:\n # get acual node uids\n pre = self.S.filter_units(pre_tags, subset=pre_pop)\n post = self.S.filter_units(post_tags, subset=post_pop)\n # assert unique identification\n assert len(pre) == 1 and len(post) == 1\n # if so, go ahead and connect\n self.S.connect(pre.pop(), pre_portID, post.pop(), post_portID)", "def get_outgoing_connections(self, comp):\n return self.connections.get(comp.id, [])", "def _add_connection(self, con):\n # get connectors by the above specified labels\n start = self.connector_by_label(con[0])\n end = self.connector_by_label(con[1])\n if start.parent_type == 'box' and end.parent_type == 'box':\n # make sure, that not two inputs or two outputs are connected\n if 
start.connector_type == end.connector_type:\n raise ConnectorError(f\"Connection {con} connects \"\n f\"input to input or output to output.\")\n # make sure, that inputs are always first\n # and outputs are always second\n elif (start.connector_type == 'output'\n or end.connector_type == 'input'):\n start, end = end, start\n # make sure, that a switch does not connect to itself\n elif start.parent_type == 'switch' and end.parent_type == 'switch':\n if start.switch == end.switch:\n raise ConnectorError(f\"Connection {con} connects \"\n f\"a switch to itself.\")\n\n # create connection\n connection = ArduinoSwitchControlConnection(start, end)\n\n # add connection to attributes\n self.connections.append(connection)", "def __init__(self, *args):\r\n \r\n self.bl = None\r\n self.buddy = None\r\n self.connection = None\r\n \r\n #\r\n # incoming\r\n #\r\n #__init__(self, bl, connection, command, encoded)\r\n if type(args[0]) == BuddyList:\r\n self.bl = args[0]\r\n self.connection = args[1]\r\n if self.connection:\r\n self.buddy = self.connection.buddy\r\n self.command = args[2]\r\n \r\n # decode from line format to raw binary\r\n # and then let the message parse it \r\n self.blob = decodeLF(args[3])\r\n self.parse()\r\n \r\n # the incoming message is now properly initialized and somebody\r\n # could now call its execute() method to trigger its action\r\n return\r\n \r\n \r\n #\r\n # outgoing\r\n #\r\n #__init__(self, connection, blob)\r\n #__init__(self, buddy, blob)\r\n if type(args[0]) in [InConnection, OutConnection, Buddy]:\r\n if type(args[0]) in [InConnection, OutConnection]:\r\n self.connection = args[0]\r\n if self.connection.buddy:\r\n self.buddy = self.connection.buddy\r\n \r\n elif type(args[0]) == Buddy:\r\n self.buddy = args[0]\r\n self.connection = self.buddy.conn_out\r\n \r\n if len(args) > 1:\r\n blob = args[1]\r\n if type(blob) in [list, tuple]:\r\n self.blob = \" \".join(str(part) for part in blob)\r\n else:\r\n self.blob = str(blob)\r\n else:\r\n self.blob = \"\"\r\n \r\n self.command = type(self).__name__[12:]", "def init_connection(self, connection):", "def test_reconnect_another(self):\n line, head = self._get_line()\n self.tool.connect(line, head, (120, 50))\n cinfo = self.canvas.get_connection(head)\n assert cinfo is not None\n item = cinfo.connected\n port = cinfo.port\n constraint = cinfo.constraint\n\n assert item == self.box1\n assert port == self.box1.ports()[0]\n assert item != self.box2\n\n # connect to box2, handle's connected item and connection data\n # should differ\n self.tool.connect(line, head, (120, 150))\n cinfo = self.canvas.get_connection(head)\n assert cinfo is not None\n self.assertEqual(self.box2, cinfo.connected)\n self.assertEqual(self.box2.ports()[0], cinfo.port)\n\n # old connection does not exist\n self.assertNotEqual(item, cinfo.connected)\n self.assertNotEqual(constraint, cinfo.constraint)", "def getConnections():\n\n c = psutil.net_connections()\n connects = {}\n\n count = 0\n for connection in c:\n conn = {}\n status = connection.status\n if status == 'ESTABLISHED' or connection.status == 'CLOSE_WAIT':\n conn['status'] = status\n conn['local'] = connection.laddr[0] + ':' + str(connection.laddr[1])\n conn['remote'] = connection.raddr[0] + ':' + str(connection.raddr[1])\n connects[count] = conn\n count += 1\n elif status == 'LISTEN':\n conn['status'] = status\n conn['local'] = connection.laddr[0] + ':' + str(connection.laddr[1])\n connects[count] = conn\n count += 1\n else:\n pass\n\n return connects" ]
[ "0.57460886", "0.5582826", "0.5544595", "0.55210364", "0.54384655", "0.5425718", "0.5386172", "0.5219724", "0.5060346", "0.5010697", "0.49985945", "0.49559784", "0.48724976", "0.4830181", "0.4822985", "0.48213017", "0.48176953", "0.4799253", "0.4786793", "0.47852033", "0.47785226", "0.4777965", "0.47752014", "0.47720534", "0.47639272", "0.47581694", "0.47543406", "0.47512567", "0.47512203", "0.4745073" ]
0.7375816
0
fetches a user account object using guid
def __fetch_user_account(self, guid): try: user_account = UserAccount.objects.get(guid=guid) except Exception as e: logger.exception(e) else: return user_account
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_user(id):\n pass", "def get_one_user():", "async def fetch_self(self) -> OwnUser:\n return await self.app.rest.fetch_my_user()", "def get(self, no):\n user = get_a_user(no)\n if not user:\n api.abort(404)\n else:\n return user", "def getuser(gh, user):\n return gh.users(user).get()", "def load_user(uid):\n return User.query.get(uid)", "def fetch_user(uid):\n users = find_users(uid=uid)\n if users:\n return users[0]._asdict()\n return (\"Not found\", 404)", "def get_user(self, user_id):\n try:\n return Account.objects.get(pk=user_id)\n except Account.DoesNotExist:\n return None", "def user(email):\r\n return User.objects.get(email=email)", "async def get(self, request, uid):\n return await super(User, self).get_item(request.app.pool, 'user', uid)", "def fetchguid(self):\n if self['guid']:\n request = self._connection.get('people/{0}.json'.format(self['guid']))\n self._postproc(request)\n self._fetchstream()\n else:\n raise errors.UserError('GUID not set')", "def get_user(pk):\n user = UserService(user=pk).get_user_by_id()\n return CustomResponse(data=user).response()", "def getUser(client, attrs):\n try:\n return client.assertedSearch(\"User [name='%s']\" % attrs['name'])[0]\n except icat.SearchResultError:\n user = client.new(\"User\")\n initobj(user, attrs)\n user.create()\n return user", "def auth_by_guid(self):\n self.console.debug(\"Auth by guid: %r\", self.guid)\n try:\n return self.console.storage.getClient(self)\n except KeyError, msg:\n self.console.debug('User not found %s: %s', self.guid, msg)\n return False", "def get(cls, username, server, bucket=None):\n\t\tusername = cls._clean_username(username)\n\t\tif not username:\n\t\t\traise IDMException(\"you must provide a username\")\n\t\t\n\t\tres = cls.find_on({'type': 'user', 'username': username}, server, bucket)\n\t\tif res and len(res) > 0:\n\t\t\treturn res[0]\n\t\traise IDMException(\"no user with the given username\", 404)", "def cmd_account_user(client, args):\n account_user = client.get_account(args.username)\n data = account_user.__dict__\n generate_output({'account_user': data})", "def get_user(self, object_id):\n return self.get_object(\"user\", object_id)", "def load_user():\n\n return User.query.get(int(id))", "def getUserByuID(self, uID):\n cursor = self.conn.cursor()\n query = \"SELECT ufirstname, ulastname, udescription, urole, uclassification, email, pin \" \\\n \"FROM Users natural inner join Credential \" \\\n \"WHERE uID= %s;\"\n cursor.execute(query, (uID,))\n result = cursor.fetchone()\n return result", "async def get_user_account(self):\n ts = tools.get_cur_timestamp_ms()\n params = {\n \"timestamp\": str(ts)\n }\n success, error = await self.request(\"GET\", \"/api/v3/account\", params, auth=True)\n return success, error", "def getUserBycID(self, cID):\n\n cursor = self.conn.cursor()\n query = \"SELECT ufirstname, ulastname, udescription, urole, uclassification, email, pin \" \\\n \"FROM Users natural inner join Credential \" \\\n \"WHERE cID= %s;\"\n cursor.execute(query, (cID,))\n result = cursor.fetchone()\n return result", "def get_info(email):\n # Get the first user where _id=email\n user = models.User.objects.raw({\"_id\": email}).first()\n return user", "def get_user(self, user_id):\n oauth_user = OAuthioUser.objects.filter(user__id=user_id)\n if oauth_user.exists():\n return oauth_user.get().user", "def get_a_user(public_id):\n return User.query.filter_by(public_id=public_id).first()", "def GetAppEngineUser(user_id):\n email_address = GetEmailAddress(user_id)\n if email_address:\n return 
users.User(email_address)\n else:\n return None", "def load_user(user_email):\n return User.query.get(user_email)", "def get_user(self, user_id):\n uri = 'users/' + user_id\n return self.make_request(uri)", "def get_user_account(module, idrac):\n slot_uri, slot_id, empty_slot, empty_slot_uri = None, None, None, None\n if not module.params[\"user_name\"]:\n module.fail_json(msg=\"User name is not valid.\")\n response = idrac.export_scp(export_format=\"JSON\", export_use=\"Default\", target=\"IDRAC\", job_wait=True)\n user_attributes = idrac.get_idrac_local_account_attr(response.json_data, fqdd=\"iDRAC.Embedded.1\")\n slot_num = tuple(range(2, 17))\n for num in slot_num:\n user_name = \"Users.{0}#UserName\".format(num)\n if user_attributes.get(user_name) == module.params[\"user_name\"]:\n slot_id = num\n slot_uri = ACCOUNT_URI + str(num)\n break\n if not user_attributes.get(user_name) and (empty_slot_uri and empty_slot) is None:\n empty_slot = num\n empty_slot_uri = ACCOUNT_URI + str(num)\n return user_attributes, slot_uri, slot_id, empty_slot, empty_slot_uri", "async def get_user_account(self):\n uri = \"/fapi/v1/account\"\n ts = tools.get_cur_timestamp_ms()\n params = {\n \"timestamp\": str(ts)\n }\n success, error = await self.request(\"GET\", uri, params, auth=True)\n return success, error", "def load_user(_id):\n if utils.api_preflight():\n return User(_id=_id)" ]
[ "0.661174", "0.6249013", "0.6194991", "0.6135571", "0.60972863", "0.60675436", "0.604731", "0.5956097", "0.5941534", "0.5939436", "0.59257036", "0.59254116", "0.59202766", "0.5903319", "0.5896457", "0.5891398", "0.5853326", "0.5843873", "0.5839033", "0.5819695", "0.5809867", "0.5766343", "0.5761572", "0.5753574", "0.57487196", "0.5745337", "0.5744143", "0.57428813", "0.57392657", "0.5719338" ]
0.74750584
0
creates a payment object
def __create_payment_object(self, data, direction):
    from_account = self.__fetch_user_account(guid=data.get("from_account"))
    to_account = self.__fetch_user_account(guid=data.get("to_account"))
    payment_obj = Payment.objects.create(
        from_account=from_account,
        to_account=to_account,
        initiated_by=from_account,
        amount=data.get("amount"),
        direction=direction,
    )
    return payment_obj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(**data):\n http_client = HttpClient()\n response, _ = http_client.post(routes.url(routes.PAYMENT_RESOURCE), data)\n return resources.Payment(**response)", "def create_payment(self, payment_request):\n data = payment_request.serialize()\n random_number = int(datetime.timestamp())\n\n headers = {\n 'app-id': self.app_id,\n 'Random': random_number,\n 'Hmac': self.generate_signature(random_number)\n }\n try:\n url = f'{self.base_url}/pos'\n response = requests.post(url, headers, data)\n if response != HTTPStatus.OK:\n raise Exception\n\n response_data = PaymentResponse.from_api_json(response.json())\n\n if response_data.is_success:\n raise OvoClientError(response_data.response_status)\n\n return response_data\n\n\n\n\n\n\n\n except Exception as exc:\n log.exception(f\"Failed to create new ovo payment for order {payment_request.reference_number}\")\n raise", "def post(self, args):\n with api.commit_or_abort(\n db.session,\n default_error_message=\"Failed to create a new Payment\"\n ):\n payment = Payment(**args)\n db.session.add(payment)\n return payment", "def create_payment(self,\n body):\n\n return super().new_api_call_builder.request(\n RequestBuilder().server('default')\n .path('/v2/payments')\n .http_method(HttpMethodEnum.POST)\n .header_param(Parameter()\n .key('Content-Type')\n .value('application/json'))\n .body_param(Parameter()\n .value(body))\n .header_param(Parameter()\n .key('accept')\n .value('application/json'))\n .body_serializer(APIHelper.json_serialize)\n .auth(Single('global'))\n ).response(\n ResponseHandler()\n .deserializer(APIHelper.json_deserialize)\n .is_api_response(True)\n .convertor(ApiResponse.create)\n ).execute()", "def create_incoming_payment(self, data):\n\n payment_obj = self.__create_payment_object(data=data, direction=True)\n self.__update_user_account(guid=data.get(\"to_account\"), amount=data.get(\"amount\"), direction=True)\n return payment_obj", "def create_invoice(cls, payment_request: Tuple[Dict[str, Any]], authorization: Tuple[Dict[str, Any]]) -> Dict:\n # pylint: disable=too-many-locals, too-many-statements\n business_info = payment_request.get('businessInfo')\n filing_info = payment_request.get('filingInfo')\n account_info = payment_request.get('accountInfo', None)\n corp_type = business_info.get('corpType', None)\n business_identifier = business_info.get('businessIdentifier')\n\n payment_account = cls._find_payment_account(authorization)\n payment_method = _get_payment_method(payment_request, payment_account)\n current_app.logger.info(f'Creating Payment Request : '\n f'{payment_method}, {corp_type}, {business_identifier}, '\n f'{payment_account.auth_account_id}')\n\n bcol_account = cls._get_bcol_account(account_info, payment_account)\n\n # Calculate the fees\n fees = _calculate_fees(corp_type, filing_info)\n\n # Create payment system instance from factory\n pay_service: PaymentSystemService = PaymentSystemFactory.create(\n payment_method=payment_method,\n corp_type=corp_type,\n fees=sum(fee.total for fee in fees),\n account_info=account_info,\n payment_account=payment_account\n )\n current_app.logger.info(f'Created Pay System Instance : {pay_service}')\n\n pay_system_invoice: Dict[str, any] = None\n invoice: Invoice = None\n\n try:\n invoice = Invoice()\n invoice.bcol_account = bcol_account\n invoice.payment_account_id = payment_account.id\n invoice.cfs_account_id = payment_account.cfs_account_id\n invoice.invoice_status_code = pay_service.get_default_invoice_status()\n invoice.service_fees = sum(fee.service_fees for fee in fees) if 
fees else 0\n invoice.total = sum(fee.total for fee in fees) if fees else 0\n invoice.paid = 0\n invoice.refund = 0\n invoice.routing_slip = get_str_by_path(account_info, 'routingSlip')\n invoice.filing_id = filing_info.get('filingIdentifier', None)\n invoice.dat_number = get_str_by_path(account_info, 'datNumber')\n invoice.folio_number = filing_info.get('folioNumber', None)\n invoice.business_identifier = business_identifier\n invoice.payment_method_code = pay_service.get_payment_method_code()\n invoice.corp_type_code = corp_type\n details = payment_request.get('details')\n if not details or details == 'null':\n details = []\n invoice.details = details\n invoice = invoice.flush()\n\n line_items = []\n for fee in fees:\n line_items.append(PaymentLineItem.create(invoice.id, fee))\n\n current_app.logger.info(f'Handing off to payment system to create invoice for {invoice.id}')\n invoice_reference = pay_service.create_invoice(payment_account, line_items, invoice,\n corp_type_code=invoice.corp_type_code)\n\n invoice.commit()\n\n pay_service.complete_post_invoice(invoice, invoice_reference)\n\n invoice = Invoice.find_by_id(invoice.id, skip_auth_check=True)\n\n except Exception as e: # NOQA pylint: disable=broad-except\n current_app.logger.error('Rolling back as error occured!')\n current_app.logger.error(e)\n if invoice:\n invoice.rollback()\n if pay_system_invoice:\n pay_service.cancel_invoice(\n payment_account,\n pay_system_invoice.get('invoice_number'),\n )\n raise\n\n current_app.logger.debug('>Finished creating payment request')\n\n return invoice.asdict(include_dynamic_fields=True)", "def _create_payment_transaction(self, amount, description):\n PaymentTransaction = Pool().get('payment_gateway.transaction')\n Date = Pool().get('ir.date')\n\n return PaymentTransaction(\n description=description or 'Auto charge from sale',\n date=Date.today(),\n party=self.sale.party,\n credit_account=self.credit_account,\n payment_profile=self.payment_profile,\n address=(\n self.payment_profile and\n self.payment_profile.address or self.sale.invoice_address\n ),\n amount=self.sale.currency.round(amount),\n currency=self.sale.currency,\n gateway=self.gateway,\n sale_payment=self.id,\n provider_reference=self.reference,\n )", "def create_outgoing_payment(self, data):\n\n payment_obj = self.__create_payment_object(data=data, direction=False)\n self.__update_user_account(guid=data.get(\"from_account\"), amount=data.get(\"amount\"), direction=False)\n return payment_obj", "def create_payment_data(self, **kwargs):\n\n order_id = kwargs.get('order_id')\n timestamp = datetime.now().strftime('%Y%m%d%H%M%S')\n\n currency = kwargs.get('currency', 'RON')\n amount = kwargs.get('amount')\n customer_id = kwargs.get('customer_id')\n\n # the description of the payment\n details = kwargs.get('details')\n\n billing = kwargs.get('billing', {})\n\n params = kwargs.get('params', {})\n\n # urls\n confirm_url = kwargs.get('confirm_url')\n return_url = kwargs.get('return_url')\n\n if not order_id or not amount or not customer_id or not details or not confirm_url or not return_url:\n if self.developement:\n debug(\"Arguments for create_payment_data: %s\", kwargs)\n\n raise Exception(\"Can't create mobilpay request with missing args.\")\n\n order_id = str(order_id)\n if len(order_id) > 64:\n raise Exception('order_id should not have more than 64 characters.')\n\n args = {\n # order tag\n \"order_id\": order_id,\n \"order_type\": \"card\",\n \"timestamp\": timestamp,\n\n # invoice tag\n \"amount\": amount, \n \"currency\": 
currency,\n \"customer_id\": customer_id,\n\n \"details\": details,\n\n # other params\n \"params\": params,\n\n # urls\n \"confirm_url\": confirm_url,\n \"return_url\": return_url\n }\n\n if billing:\n args['billing'] = {\n \"first_name\": billing.get('first_name', ''),\n \"last_name\": billing.get('last_name', ''),\n \"address\": billing.get('address', ''),\n \"phone\": billing.get('phone', ''),\n \"email\": billing.get('email', '')\n }\n\n # create the xml\n xml_message = self.create_request_xml(**args)\n\n if self.developement:\n debug(xml_message)\n\n return self.encrypt_message(xml_message)", "def create_payment(\n member, timestamp, amount, payment_strategy, first_unpaid=None,\n comments='', custom_fee=None):\n # get the latest unpaid monthly fee\n if first_unpaid is None:\n try:\n last_quota = Quota.objects.filter(member=member).latest()\n except Quota.DoesNotExist:\n first_unpaid = (member.first_payment_year, member.first_payment_month)\n else:\n first_unpaid = increment_year_month(last_quota.year, last_quota.month)\n first_unpaid_year, first_unpaid_month = first_unpaid\n\n # calculate how many fees covers the amount, supporting not being exact but for a very\n # small difference\n fee = member.category.fee if custom_fee is None else custom_fee\n paying_quant_real = amount / fee\n paying_quant_int = int(round(paying_quant_real))\n if abs(paying_quant_real - paying_quant_int) > paying_quant_int * 0.01:\n raise ValueError(\"Paying amount too inexact! amount={} fee={}\".format(amount, fee))\n\n # create the payment itself\n payment = Payment.objects.create(\n timestamp=timestamp, amount=amount, strategy=payment_strategy, comments=comments)\n\n # create the monthly fee(s)\n yearmonths = get_year_month_range(first_unpaid_year, first_unpaid_month, paying_quant_int)\n for year, month in yearmonths:\n Quota.objects.create(payment=payment, month=month, year=year, member=member)", "def __init__( self, payment, token, payment_action, payer_id ):\n\n if not isinstance(payment, fields.Payment ):\n raise ValueError( \n 'payment must be an instance of class <Payment>.' )\n\n if (token is None) or (len(token) != 20):\n raise ValueError( 'Invalid token argument' )\n\n if payment_action not in ['Sale','Authorization','Order']:\n raise ValueError( \n 'payment_action must be Sale, Authorization or Order.' 
)\n\n if (payer_id is None) or (len(payer_id) != 13):\n raise ValueError( 'Invalid payer id' )\n\n self._nvp_response = dict()\n self._nvp_request = dict()\n self._nvp_request['METHOD'] = 'DoExpressCheckoutPayment'\n \n nvp = copy.deepcopy( payment.get_nvp_request() )\n self._nvp_request.update( nvp )\n self._nvp_request['TOKEN'] = token\n self._nvp_request['PAYMENTACTION'] = payment_action\n self._nvp_request['PAYERID'] = payer_id", "def start_payment(owner, amount, payment_method):\n new_transaction_id = generate_transaction_id(prefix=str(owner.id))\n while Payment.payments.filter(transaction_id=new_transaction_id).exists():\n new_transaction_id = generate_transaction_id(prefix=str(owner.id))\n new_payment = Payment.payments.create(\n owner=owner,\n amount=amount,\n method=payment_method,\n transaction_id=new_transaction_id,\n started_at=timezone.now(),\n )\n return new_payment", "def make_payment(self, contact_id=None, date_cursor=None, amount=0):\n\n logger.info(\"Running make_payment for policy %s\" % self.policy.id)\n if not date_cursor:\n date_cursor = datetime.now().date()\n\n if not contact_id:\n try:\n contact_id = self.policy.named_insured\n except:\n logger.exception(\"Contact Id is not found for policy %s\" % self.policy.id)\n\n payment = Payment(self.policy.id,\n contact_id,\n amount,\n date_cursor)\n db.session.add(payment)\n db.session.commit()\n\n logger.info(\"Payment successful for policy %s\" % self.policy.id)\n\n return payment", "def __init__(self, payment_id, camper_id = '', camp_id = '', payment_date = '', paid_amount = ''):\r\n self.__payment_id__ = payment_id\r\n self.__camper_id__ = camper_id\r\n self.__camp_id__ = camp_id\r\n self.__payment_date__ = payment_date\r\n self.__paid_amount__ = paid_amount", "def test_create_single_payment(self):\n order = self.order\n data = {\n 'method' : 'paypal-rest-single',\n 'order_id' : order.pk,\n 'authorization' : self.ppsdk_single_payment\n } \n resp = self.api_client.post(self.url('payment'), data=data, \n format='json', \n authentication=self.credentials())\n self.assertHttpCreated(resp)\n rdata = self.deserialize(resp)\n self.assertEqual(Decimal(rdata['amount']), order.subtotal)\n self.assertEqual(rdata['currency'], order.currency)\n self.assertEqual(rdata['payment_module_key'], data['method'])\n self.assertNotIn('authorization', rdata)\n self.assertNotIn('data', rdata)\n self.assertEqual(int(rdata['status']), OrderPayment.PROCESSED)", "def create(payment, **data):\n if isinstance(payment, resources.Payment):\n payment = payment.id\n\n http_client = HttpClient()\n response, _ = http_client.post(routes.url(routes.REFUND_RESOURCE, payment_id=payment), data)\n return resources.Refund(**response)", "def _generate_transaction(\n payment: Payment,\n kind: str,\n amount: Decimal,\n *,\n id='',\n is_success=True,\n **data) -> Transaction:\n transaction = create_transaction(\n payment=payment,\n kind=kind,\n amount=amount,\n currency=data.pop('currency', payment.currency),\n gateway_response=data,\n token=id,\n is_success=is_success)\n return transaction", "def __init__(self, amount=None, payment_date=None, payment_holds=None, payment_method=None, payment_reference_id=None, payment_status=None): # noqa: E501 # noqa: E501\n self._amount = None\n self._payment_date = None\n self._payment_holds = None\n self._payment_method = None\n self._payment_reference_id = None\n self._payment_status = None\n self.discriminator = None\n if amount is not None:\n self.amount = amount\n if payment_date is not None:\n self.payment_date = 
payment_date\n if payment_holds is not None:\n self.payment_holds = payment_holds\n if payment_method is not None:\n self.payment_method = payment_method\n if payment_reference_id is not None:\n self.payment_reference_id = payment_reference_id\n if payment_status is not None:\n self.payment_status = payment_status", "def create_account_payment(self, order, user):\n access_token = get_random_string(20)\n domain = SysConfig.get_config('DOMAIN')\n\n with transaction.atomic():\n payment_txn = Transaction.objects.create(gateway=self.gateway,\n order=order,\n description='Transaction for order #%s' % order.id,\n status=Transaction.STATUS_PROCESSING,\n currency=order.currency.code,\n amount=order.charge_amount,\n updated_by=unicode(user),\n created_by=unicode(user))\n payment_txn.add_param('access_token', access_token, user)\n payment_txn.save()\n\n try:\n payment = {\n 'intent': 'sale',\n 'redirect_urls': {\n 'return_url': 'http://%s%s' % (domain, reverse('payments_process_account_success',\n args=[payment_txn.id, access_token])),\n 'cancel_url': 'http://%s%s' % (domain, reverse('payments_process_account_cancel',\n args=[payment_txn.id, access_token])),\n },\n 'payer': {\n 'payment_method': 'paypal',\n },\n 'transactions': [{\n 'item_list': {\n 'items': [{\n 'name': item.product.name,\n 'sku': item.product.name,\n 'price': _exchange_amount(item.price, order.exchange_rate),\n 'currency': order.currency.code,\n 'quantity': item.quantity\n } for item in order.items.all()]\n },\n 'amount': {\n 'total': unicode(order.charge_amount),\n 'currency': order.currency.code,\n 'details': {\n 'subtotal': _exchange_amount(order.sub_total, order.exchange_rate),\n 'tax': _exchange_amount(order.taxes, order.exchange_rate),\n 'shipping': _exchange_amount(order.shipping_cost, order.exchange_rate)\n }\n },\n 'description': 'Payment for order #%s' % (order.id)\n }],\n }\n\n logger.info('Processing PayPal account.', extra=payment)\n payment = paypalrestsdk.Payment(payment, api=self.api)\n payment_created = payment.create()\n except Exception as e:\n logger.error('Failed to process PayPal account (transaction_id: %s)' % payment_txn.id)\n logger.exception(e)\n\n raise DoorstepError('We failed to process your PayPal account at the moment, please try again later!')\n\n if payment_created:\n with transaction.atomic():\n payment_txn.add_param('id', unicode(payment.id), user)\n payment_txn.add_param('create_time', unicode(payment.create_time), user)\n payment_txn.add_param('update_time', unicode(payment.update_time), user)\n payment_txn.add_param('state', unicode(payment.state), user)\n payment_txn.add_param('intent', unicode(payment.intent), user)\n payment_txn.add_param('payment_method', unicode(payment.payer.payment_method), user)\n payment_txn.save()\n\n for link in payment.links:\n if link.rel == 'approval_url' and link.method == 'REDIRECT':\n return link.href\n\n payment_txn.status = Transaction.STATUS_FAILED\n payment_txn.error_message = payment.error['message']\n payment_txn.save()\n\n raise DoorstepError('We failed to process your PayPal account at the moment, please try again later!')", "def test_successful_create_payment_payu_pln(self):\n order = Order(name='Test PLN order', total=100, currency='PLN')\n order.save()\n response = self.client.post(reverse('getpaid-new-payment', kwargs={'currency' : 'PLN'}),\n {'order': order.pk,\n 'backend': 'getpaid.backends.payu'}\n )\n self.assertEqual(response.status_code, 302)\n Payment = get_model('getpaid', 'Payment')\n payment = Payment.objects.get(order=order.pk)\n 
self.assertEqual(payment.backend, 'getpaid.backends.payu')\n self.assertEqual(payment.amount, order.total)\n self.assertEqual(payment.currency, order.currency)\n self.assertEqual(payment.status, 'in_progress')\n self.assertEqual(payment.paid_on, None)\n self.assertEqual(payment.amount_paid, 0)", "def test_successful_create_payment_payu_pln(self):\n order = Order(name='Test PLN order', total=100, currency='PLN')\n order.save()\n response = self.client.post(reverse('getpaid-new-payment', kwargs={'currency': 'PLN'}),\n {'order': order.pk,\n 'backend': 'getpaid.backends.payu'}\n )\n self.assertEqual(response.status_code, 302)\n Payment = apps.get_model('getpaid', 'Payment')\n payment = Payment.objects.get(order=order.pk)\n self.assertEqual(payment.backend, 'getpaid.backends.payu')\n self.assertEqual(payment.amount, order.total)\n self.assertEqual(payment.currency, order.currency)\n self.assertEqual(payment.status, 'in_progress')\n self.assertEqual(payment.paid_on, None)\n self.assertEqual(payment.amount_paid, 0)", "def __init__(self,\r\n pay_period=None,\r\n billable=None,\r\n asset_id=None,\r\n pay_date=None,\r\n start_date=None,\r\n end_date=None,\r\n net_pay_current=None,\r\n net_pay_ytd=None,\r\n gross_pay_current=None,\r\n gross_pay_ytd=None,\r\n payroll_provider=None,\r\n employer=None,\r\n employee=None,\r\n pay_stat=None,\r\n deductions=None,\r\n direct_deposits=None,\r\n additional_properties = {}):\r\n\r\n # Initialize members of the class\r\n self.pay_period = pay_period\r\n self.billable = billable\r\n self.asset_id = asset_id\r\n self.pay_date = pay_date\r\n self.start_date = start_date\r\n self.end_date = end_date\r\n self.net_pay_current = net_pay_current\r\n self.net_pay_ytd = net_pay_ytd\r\n self.gross_pay_current = gross_pay_current\r\n self.gross_pay_ytd = gross_pay_ytd\r\n self.payroll_provider = payroll_provider\r\n self.employer = employer\r\n self.employee = employee\r\n self.pay_stat = pay_stat\r\n self.deductions = deductions\r\n self.direct_deposits = direct_deposits\r\n\r\n # Add additional model properties to the instance\r\n self.additional_properties = additional_properties", "def __init__(self, id, amount, merchant_account_id, plan_id, recurring, refund, status, transaction_source,\n created_at):\n self.id = id\n self.amount = amount\n self.merchant_account_id = merchant_account_id\n self.plan_id = plan_id\n self.recurring = recurring\n self.refund = refund\n self.status = status\n self.transaction_source = transaction_source\n self.created_at = created_at", "def getPayment(self):\n pass", "def process_payment(self, form):\n # Let the default processor handle surveys that don't require payment\n if not self.survey.get_requires_payment():\n return super(AuthorizenetSurveyPurchaseCreate, self).process_payment(form)\n\n user = self.request.user\n try:\n charge = authorize.Transaction.sale({\n \"amount\": self.survey.cost,\n \"email\": user.email,\n \"credit_card\": {\n \"card_number\": str(form.cleaned_data[\"card_number\"]),\n \"card_code\": str(form.cleaned_data[\"card_ccv\"]),\n \"expiration_date\": str(form.cleaned_data[\"card_expiry\"]),\n },\n \"billing\": {\n \"first_name\": user.first_name,\n \"last_name\": user.last_name,\n }\n })\n\n # Show any Authorize.net errors to the user\n except authorize.exceptions.AuthorizeError as exception:\n try:\n # Unpack exceptions with multiple error messages (AuthorizeInvalidError)\n errors = []\n for code, msg in exception.asdict().items():\n errors.append(forms.ValidationError(msg, code=code))\n raise 
forms.ValidationError(errors)\n except AttributeError:\n # Exception doesn't implement asdict() (AuthorizeError)\n raise forms.ValidationError(str(exception))\n\n # On success, save the transaction details to the form instance\n form.instance.amount = self.survey.cost\n form.instance.payment_method = \"Authorize.Net\"\n try:\n form.instance.transaction_id = charge[\"transaction_response\"][\"trans_id\"]\n except KeyError:\n form.instance.transaction_id = \"Unknown\"", "def to_create_payement_request(self):\n if not isinstance(self.reference, str):\n raise ValueError(\n 'reference should be string. This field is required')\n\n result = {\n 'amount': {\n # docs:https://docs.adyen.com/development-resources/currency-codes\n 'value': self.amount * 100,\n 'currency': self.currency\n },\n 'reference': self.reference,\n 'countryCode': self.country_code,\n }\n\n if self.shopper_reference and isinstance(self.shopper_reference, str):\n result['shopperReference'] = self.shopper_reference\n result['recurringProcessingModel'] = 'CardOnFile'\n result['storePaymentMethod'] = True\n\n return result", "def payment(self, amount=None):\n if amount is None:\n amount = random() * 1000\n transaction = {\"account_num\": choice(TRANSACTION_ACCT_LIST),\n \"amount\": amount,\n \"uuid\": generate_username()}\n with self.client.post(\"/payment\",\n data=transaction,\n catch_response=True) as response:\n if response.url is None or \"failed\" in response.url:\n response.failure(\"payment failed\")", "def post(self, payment_id=None):\n data = request.get_json()\n redirect_url = data.get('redirect_url')\n cart_token = data.get('cart_token')\n address_id = data.get('address_id')\n \n cart = Cart.query.filter_by(token=cart_token, user_id=current_user.id).first()\n if not cart:\n return {\"message\":\"No cart with this id\"}, 404\n\n if not address_id:\n return {\"message\": \"Please enter a address for your order\"}, 404\n\n order = Order.create_from_cart(cart_token, address_id)\n payment = Payment.query.filter_by(order_id=order.id).first()\n if not payment:\n payment = Payment(\n user_id=current_user.id, \n order_id=order.id, \n amount=order.total,\n status='Pending'\n )\n\n db.session.add(payment)\n db.session.commit()\n\n client = Client(current_app.config['ZARINPAL_WEBSERVICE'])\n mail = current_user._email\n\n if not mail:\n return {\"message\": \"Please enter your email address to continue the payment\"}\n\n user_info = UserAddress.query.filter_by(id=address_id).first()\n if user_info.phone:\n mobile = user_info.phone\n else:\n mobile = '' \n\n result = client.service.PaymentRequest(current_app.config['MERCHANT_ID'],\n payment.amount,\n 'nani',\n mail,\n mobile,\n redirect_url)\n\n payment.authority = result.Authority\n db.session.commit()\n if result.Status == 100:\n return {'payment_url':'https://www.zarinpal.com/pg/StartPay/' + result.Authority}\n else:\n return {\n 'message':\"We can't connect you to zarin pal server, right now. Please try again in a few moments.\"\n }, 404", "def __init__(self, paymentDict):\n self.createdTime = calendar.timegm(\n time.strptime(paymentDict['created_time'], '%Y-%m-%dT%XZ'))\n self.actor = paymentDict['actor']\n self.target = paymentDict['target']", "def create_payment_record(payer_id, timestamp=None, amount=DEFAULT_FEE):\n if timestamp is None:\n timestamp = make_aware(datetime.datetime(year=2017, month=2, day=5))\n record = {\n 'timestamp': timestamp,\n 'amount': amount,\n 'payer_id': payer_id,\n 'id_helper': {\n 'payment_id': str(uuid.uuid4()),\n },\n }\n return record" ]
[ "0.81836104", "0.73561925", "0.721683", "0.7197337", "0.70666915", "0.70559716", "0.6944051", "0.69413054", "0.68027186", "0.67675555", "0.6753749", "0.6652057", "0.6636959", "0.6498955", "0.6483282", "0.6450776", "0.64113176", "0.6373708", "0.6372573", "0.63036406", "0.62837046", "0.62639946", "0.6178893", "0.61530215", "0.6142087", "0.6133627", "0.61223644", "0.6121933", "0.61205137", "0.6120238" ]
0.79805636
1
public method for outgoing payment object creation
def create_outgoing_payment(self, data):
    payment_obj = self.__create_payment_object(data=data, direction=False)
    self.__update_user_account(guid=data.get("from_account"), amount=data.get("amount"), direction=False)
    return payment_obj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __create_payment_object(self, data, direction):\n\n from_account = self.__fetch_user_account(guid=data.get(\"from_account\"))\n to_account = self.__fetch_user_account(guid=data.get(\"to_account\"))\n payment_obj = Payment.objects.create(\n from_account=from_account,\n to_account=to_account,\n initiated_by=from_account,\n amount=data.get(\"amount\"),\n direction=direction,\n )\n return payment_obj", "def create_payment(self,\n body):\n\n return super().new_api_call_builder.request(\n RequestBuilder().server('default')\n .path('/v2/payments')\n .http_method(HttpMethodEnum.POST)\n .header_param(Parameter()\n .key('Content-Type')\n .value('application/json'))\n .body_param(Parameter()\n .value(body))\n .header_param(Parameter()\n .key('accept')\n .value('application/json'))\n .body_serializer(APIHelper.json_serialize)\n .auth(Single('global'))\n ).response(\n ResponseHandler()\n .deserializer(APIHelper.json_deserialize)\n .is_api_response(True)\n .convertor(ApiResponse.create)\n ).execute()", "def __init__( self, payment, token, payment_action, payer_id ):\n\n if not isinstance(payment, fields.Payment ):\n raise ValueError( \n 'payment must be an instance of class <Payment>.' )\n\n if (token is None) or (len(token) != 20):\n raise ValueError( 'Invalid token argument' )\n\n if payment_action not in ['Sale','Authorization','Order']:\n raise ValueError( \n 'payment_action must be Sale, Authorization or Order.' )\n\n if (payer_id is None) or (len(payer_id) != 13):\n raise ValueError( 'Invalid payer id' )\n\n self._nvp_response = dict()\n self._nvp_request = dict()\n self._nvp_request['METHOD'] = 'DoExpressCheckoutPayment'\n \n nvp = copy.deepcopy( payment.get_nvp_request() )\n self._nvp_request.update( nvp )\n self._nvp_request['TOKEN'] = token\n self._nvp_request['PAYMENTACTION'] = payment_action\n self._nvp_request['PAYERID'] = payer_id", "def create(**data):\n http_client = HttpClient()\n response, _ = http_client.post(routes.url(routes.PAYMENT_RESOURCE), data)\n return resources.Payment(**response)", "def create_incoming_payment(self, data):\n\n payment_obj = self.__create_payment_object(data=data, direction=True)\n self.__update_user_account(guid=data.get(\"to_account\"), amount=data.get(\"amount\"), direction=True)\n return payment_obj", "def create_payment(self, payment_request):\n data = payment_request.serialize()\n random_number = int(datetime.timestamp())\n\n headers = {\n 'app-id': self.app_id,\n 'Random': random_number,\n 'Hmac': self.generate_signature(random_number)\n }\n try:\n url = f'{self.base_url}/pos'\n response = requests.post(url, headers, data)\n if response != HTTPStatus.OK:\n raise Exception\n\n response_data = PaymentResponse.from_api_json(response.json())\n\n if response_data.is_success:\n raise OvoClientError(response_data.response_status)\n\n return response_data\n\n\n\n\n\n\n\n except Exception as exc:\n log.exception(f\"Failed to create new ovo payment for order {payment_request.reference_number}\")\n raise", "def awaiting_payment(self):", "def create_payment_data(self, **kwargs):\n\n order_id = kwargs.get('order_id')\n timestamp = datetime.now().strftime('%Y%m%d%H%M%S')\n\n currency = kwargs.get('currency', 'RON')\n amount = kwargs.get('amount')\n customer_id = kwargs.get('customer_id')\n\n # the description of the payment\n details = kwargs.get('details')\n\n billing = kwargs.get('billing', {})\n\n params = kwargs.get('params', {})\n\n # urls\n confirm_url = kwargs.get('confirm_url')\n return_url = kwargs.get('return_url')\n\n if not order_id or not amount 
or not customer_id or not details or not confirm_url or not return_url:\n if self.developement:\n debug(\"Arguments for create_payment_data: %s\", kwargs)\n\n raise Exception(\"Can't create mobilpay request with missing args.\")\n\n order_id = str(order_id)\n if len(order_id) > 64:\n raise Exception('order_id should not have more than 64 characters.')\n\n args = {\n # order tag\n \"order_id\": order_id,\n \"order_type\": \"card\",\n \"timestamp\": timestamp,\n\n # invoice tag\n \"amount\": amount, \n \"currency\": currency,\n \"customer_id\": customer_id,\n\n \"details\": details,\n\n # other params\n \"params\": params,\n\n # urls\n \"confirm_url\": confirm_url,\n \"return_url\": return_url\n }\n\n if billing:\n args['billing'] = {\n \"first_name\": billing.get('first_name', ''),\n \"last_name\": billing.get('last_name', ''),\n \"address\": billing.get('address', ''),\n \"phone\": billing.get('phone', ''),\n \"email\": billing.get('email', '')\n }\n\n # create the xml\n xml_message = self.create_request_xml(**args)\n\n if self.developement:\n debug(xml_message)\n\n return self.encrypt_message(xml_message)", "def create_invoice(cls, payment_request: Tuple[Dict[str, Any]], authorization: Tuple[Dict[str, Any]]) -> Dict:\n # pylint: disable=too-many-locals, too-many-statements\n business_info = payment_request.get('businessInfo')\n filing_info = payment_request.get('filingInfo')\n account_info = payment_request.get('accountInfo', None)\n corp_type = business_info.get('corpType', None)\n business_identifier = business_info.get('businessIdentifier')\n\n payment_account = cls._find_payment_account(authorization)\n payment_method = _get_payment_method(payment_request, payment_account)\n current_app.logger.info(f'Creating Payment Request : '\n f'{payment_method}, {corp_type}, {business_identifier}, '\n f'{payment_account.auth_account_id}')\n\n bcol_account = cls._get_bcol_account(account_info, payment_account)\n\n # Calculate the fees\n fees = _calculate_fees(corp_type, filing_info)\n\n # Create payment system instance from factory\n pay_service: PaymentSystemService = PaymentSystemFactory.create(\n payment_method=payment_method,\n corp_type=corp_type,\n fees=sum(fee.total for fee in fees),\n account_info=account_info,\n payment_account=payment_account\n )\n current_app.logger.info(f'Created Pay System Instance : {pay_service}')\n\n pay_system_invoice: Dict[str, any] = None\n invoice: Invoice = None\n\n try:\n invoice = Invoice()\n invoice.bcol_account = bcol_account\n invoice.payment_account_id = payment_account.id\n invoice.cfs_account_id = payment_account.cfs_account_id\n invoice.invoice_status_code = pay_service.get_default_invoice_status()\n invoice.service_fees = sum(fee.service_fees for fee in fees) if fees else 0\n invoice.total = sum(fee.total for fee in fees) if fees else 0\n invoice.paid = 0\n invoice.refund = 0\n invoice.routing_slip = get_str_by_path(account_info, 'routingSlip')\n invoice.filing_id = filing_info.get('filingIdentifier', None)\n invoice.dat_number = get_str_by_path(account_info, 'datNumber')\n invoice.folio_number = filing_info.get('folioNumber', None)\n invoice.business_identifier = business_identifier\n invoice.payment_method_code = pay_service.get_payment_method_code()\n invoice.corp_type_code = corp_type\n details = payment_request.get('details')\n if not details or details == 'null':\n details = []\n invoice.details = details\n invoice = invoice.flush()\n\n line_items = []\n for fee in fees:\n line_items.append(PaymentLineItem.create(invoice.id, fee))\n\n 
current_app.logger.info(f'Handing off to payment system to create invoice for {invoice.id}')\n invoice_reference = pay_service.create_invoice(payment_account, line_items, invoice,\n corp_type_code=invoice.corp_type_code)\n\n invoice.commit()\n\n pay_service.complete_post_invoice(invoice, invoice_reference)\n\n invoice = Invoice.find_by_id(invoice.id, skip_auth_check=True)\n\n except Exception as e: # NOQA pylint: disable=broad-except\n current_app.logger.error('Rolling back as error occured!')\n current_app.logger.error(e)\n if invoice:\n invoice.rollback()\n if pay_system_invoice:\n pay_service.cancel_invoice(\n payment_account,\n pay_system_invoice.get('invoice_number'),\n )\n raise\n\n current_app.logger.debug('>Finished creating payment request')\n\n return invoice.asdict(include_dynamic_fields=True)", "def test_create_virtual_account_pay_out(self):\n pass", "def getPayment(self):\n pass", "def process_payment(self, form):\n # Let the default processor handle surveys that don't require payment\n if not self.survey.get_requires_payment():\n return super(AuthorizenetSurveyPurchaseCreate, self).process_payment(form)\n\n user = self.request.user\n try:\n charge = authorize.Transaction.sale({\n \"amount\": self.survey.cost,\n \"email\": user.email,\n \"credit_card\": {\n \"card_number\": str(form.cleaned_data[\"card_number\"]),\n \"card_code\": str(form.cleaned_data[\"card_ccv\"]),\n \"expiration_date\": str(form.cleaned_data[\"card_expiry\"]),\n },\n \"billing\": {\n \"first_name\": user.first_name,\n \"last_name\": user.last_name,\n }\n })\n\n # Show any Authorize.net errors to the user\n except authorize.exceptions.AuthorizeError as exception:\n try:\n # Unpack exceptions with multiple error messages (AuthorizeInvalidError)\n errors = []\n for code, msg in exception.asdict().items():\n errors.append(forms.ValidationError(msg, code=code))\n raise forms.ValidationError(errors)\n except AttributeError:\n # Exception doesn't implement asdict() (AuthorizeError)\n raise forms.ValidationError(str(exception))\n\n # On success, save the transaction details to the form instance\n form.instance.amount = self.survey.cost\n form.instance.payment_method = \"Authorize.Net\"\n try:\n form.instance.transaction_id = charge[\"transaction_response\"][\"trans_id\"]\n except KeyError:\n form.instance.transaction_id = \"Unknown\"", "def __init__(self, amount, currency_code, campaign_id=None, reference=None,\n transaction_date_time=None, customer_present:CustomerPresent=CustomerPresent.not_set,\n entry_mode:EntryMode=EntryMode.not_set, order_number=None, signature_captured=False,\n account_type:AccountType=AccountType.not_set, alternative_merchant_data=None, approval_code=None,\n batch_assignment=None, batch_id=None, cash_back_amount=None, employee_id=None, fee_amount=None,\n goods_type:GoodsType=GoodsType.not_set, industry_type:IndustryType=IndustryType.not_set,\n internet_transaction_data:InternetTransactionData=None, invoice_number=None, is_partial_shipment=None,\n is_quasi_cash=None, lane_id=None, partial_approval_capable=None, score_threshold=None,\n terminal_id=None, tip_amount=None, transaction_code=None,\n transaction_data_type:TransactionDataType=None, is_3_d_secure=None):\n self.__camelcase=constants.ALL_FIELDS\n self.__order=[]\n\n if transaction_data_type:\n self.i_type = transaction_data_type\n self.__order.append('$type')\n self.amount=amount\n self.__order.append('Amount')\n if campaign_id:\n self.campaign_id=campaign_id\n self.__order.append('CampaignId')\n self.currency_code=currency_code\n 
self.__order.append('CurrencyCode')\n if reference:\n self.reference=reference\n self.__order.append('Reference')\n self.transaction_date_time=transaction_date_time or datetime.datetime.now().isoformat()\n self.__order.append('TransactionDateTime')\n if transaction_code:\n self.transaction_code=transaction_code\n self.__order.append('TransactionCode')\n self.account_type=account_type\n self.__order.append('AccountType')\n if alternative_merchant_data:\n self.alternative_merchant_data=alternative_merchant_data\n self.__order.append('AlternativeMerchantData')\n self.approval_code=approval_code\n self.__order.append('ApprovalCode')\n if batch_assignment:\n self.batch_assignment=batch_assignment\n self.__order.append('BatchAssignment')\n if batch_id:\n self.batch_id=batch_id\n self.__order.append('BatchId')\n self.cash_back_amount=cash_back_amount if cash_back_amount else '0.00'\n self.__order.append('CashBackAmount')\n self.customer_present=customer_present\n self.__order.append('CustomerPresent')\n self.employee_id=employee_id\n self.__order.append('EmployeeId')\n self.entry_mode=entry_mode\n self.__order.append('EntryMode')\n self.fee_amount=fee_amount if fee_amount else '0.00'\n self.__order.append('FeeAmount')\n self.goods_type=goods_type\n self.__order.append('GoodsType')\n self.industry_type=industry_type\n self.__order.append('IndustryType')\n self.internet_transaction_data=internet_transaction_data\n self.__order.append('InternetTransactionData')\n self.invoice_number=invoice_number\n if is_3_d_secure:\n self.is_3_d_secure=is_3_d_secure\n self.__order.append('Is3DSecure')\n self.__order.append('InvoiceNumber')\n if is_partial_shipment:\n self.is_partial_shipment=is_partial_shipment\n self.__order.append('IsPartialShipment')\n if is_quasi_cash:\n self.is_quasi_cash=is_quasi_cash\n self.__order.append('IsQuasiCash')\n if lane_id:\n self.lane_id=lane_id\n self.__order.append('LaneId')\n self.order_number=order_number\n self.__order.append('OrderNumber')\n if partial_approval_capable:\n self.partial_approval_capable=partial_approval_capable\n self.__order.append('PartialApprovalCapable')\n if score_threshold:\n self.score_threshold=score_threshold\n self.__order.append('ScoreThreshold')\n self.signature_captured=signature_captured\n self.__order.append('SignatureCaptured')\n if terminal_id:\n self.terminal_id=terminal_id\n self.__order.append('TerminalId')\n self.tip_amount=tip_amount if tip_amount else '0.00'\n self.__order.append('TipAmount')", "def to_create_payement_request(self):\n if not isinstance(self.reference, str):\n raise ValueError(\n 'reference should be string. 
This field is required')\n\n result = {\n 'amount': {\n # docs:https://docs.adyen.com/development-resources/currency-codes\n 'value': self.amount * 100,\n 'currency': self.currency\n },\n 'reference': self.reference,\n 'countryCode': self.country_code,\n }\n\n if self.shopper_reference and isinstance(self.shopper_reference, str):\n result['shopperReference'] = self.shopper_reference\n result['recurringProcessingModel'] = 'CardOnFile'\n result['storePaymentMethod'] = True\n\n return result", "def __init__(self, donorReference='', kind=\"other\", receiverReference='', serviceUnitsError=0.0, diverseReference='', serviceUnitsEnergy=0.0, reversedId='', PricingStructure=None, line=None, UserAttributes=None, AuxiliaryAccount=None, VendorShift=None, Receipt=None, Meter=None, CustomerAccount=None, CashierShift=None, *args, **kw_args):\n #: Reference to the entity that is the source of 'amount' (for example: customer for token purchase; or supplier for free issue token).\n self.donorReference = donorReference\n\n #: Kind of transaction. Values are: \"other\", \"serviceChargePayment\", \"accountPayment\", \"tokenSalePayment\", \"tokenCancellation\", \"taxChargePayment\", \"tokenExchange\", \"tokenGrant\", \"diversePayment\", \"auxiliaryChargePayment\", \"meterConfigurationToken\", \"tokenFreeIssue\", \"transactionReversal\"\n self.kind = kind\n\n #: Reference to the entity that is the recipient of 'amount' (for example, supplier for service charge payment; or tax receiver for VAT).\n self.receiverReference = receiverReference\n\n #: Number of service units not reflected in 'serviceUnitsEnergy' due to process rounding or truncating errors.\n self.serviceUnitsError = serviceUnitsError\n\n #: Formal reference for use with diverse payment (traffic fine for example).\n self.diverseReference = diverseReference\n\n #: Actual amount of service units that is being paid for.\n self.serviceUnitsEnergy = serviceUnitsEnergy\n\n #: (if 'kind' is transactionReversal) Reference to the original transaction that is being reversed by this transaction.\n self.reversedId = reversedId\n\n self._PricingStructure = None\n self.PricingStructure = PricingStructure\n\n self.line = line\n\n self._UserAttributes = []\n self.UserAttributes = [] if UserAttributes is None else UserAttributes\n\n self._AuxiliaryAccount = None\n self.AuxiliaryAccount = AuxiliaryAccount\n\n self._VendorShift = None\n self.VendorShift = VendorShift\n\n self._Receipt = None\n self.Receipt = Receipt\n\n self._Meter = None\n self.Meter = Meter\n\n self._CustomerAccount = None\n self.CustomerAccount = CustomerAccount\n\n self._CashierShift = None\n self.CashierShift = CashierShift\n\n super(Transaction, self).__init__(*args, **kw_args)", "def _create_payment_transaction(self, amount, description):\n PaymentTransaction = Pool().get('payment_gateway.transaction')\n Date = Pool().get('ir.date')\n\n return PaymentTransaction(\n description=description or 'Auto charge from sale',\n date=Date.today(),\n party=self.sale.party,\n credit_account=self.credit_account,\n payment_profile=self.payment_profile,\n address=(\n self.payment_profile and\n self.payment_profile.address or self.sale.invoice_address\n ),\n amount=self.sale.currency.round(amount),\n currency=self.sale.currency,\n gateway=self.gateway,\n sale_payment=self.id,\n provider_reference=self.reference,\n )", "def obj_create(self, bundle, **kwargs):\n logger.info(\"Creating a new acknowledgement...\")\n #Create the object\n bundle.obj = Acknowledgement()\n #hydrate\n bundle = self.full_hydrate(bundle)\n 
\n #Set the customer\n try:\n logger.info(\"Setting customer...\")\n bundle.obj.customer = Customer.objects.get(pk=bundle.data[\"customer\"][\"id\"])\n bundle.obj.discount = bundle.obj.customer.discount\n except:\n logger.error(\"Customer with ID {0} could not be found.\".format(bundle.data['customer']['id']))\n raise\n \n #Set the employee\n try:\n logger.info(\"Setting employee...\")\n bundle.obj.employee = bundle.request.user\n except User.DoesNotExist:\n logger.error(\"User with ID {0} could not be found\".format(bundle.data['employee']['id']))\n raise\n except KeyError:\n logger.critical(\"Missing employee ID.\")\n raise\n \n #Set Status\n bundle.obj.status = \"ACKNOWLEDGED\"\n \n #Set the project or create a new one\n if \"project\" in bundle.data:\n try:\n project = Project.objects.get(pk=bundle.data['project']['id'])\n except KeyError, Project.DoesNotExist:\n try:\n project = Project()\n project.codename = bundle.data['project']['codename']\n project.save()\n except KeyError:\n project = None\n \n bundle.obj.project = project\n \n #Create items without saving them \n logger.info(\"Creating items...\")\n self.items = [Item.create(acknowledgement=bundle.obj,\n commit=False,\n **product) for product in bundle.data[\"items\"]]\n \n #Calculate the total price\n logger.info(\"Calculating balance of the order...\")\n bundle.obj.calculate_totals(self.items)\n bundle = self.save(bundle)\n \n #Save the items\n logger.info(\"Saving the items to the database...\")\n for item in self.items:\n item.acknowledgement = bundle.obj\n item.save()\n \n log_message = \"Ack {0} created on {1}. Schedule to be delivered on {1}\"\n log_message = log_message.format(bundle.obj.id,\n bundle.obj.time_created.strftime('%B %d, %Y'),\n bundle.obj.delivery_date.strftime('%B %d, %Y'))\n log = Log(message=log_message,\n delivery_date=bundle.obj.delivery_date,\n acknowledgement=bundle.obj)\n log.save()\n #Create and upload the pdfs to the \n #S3 system. The save the pdfs as\n #Attributes of the acknowledgement\n logger.info(\"Creating PDF documents...\")\n bundle.obj.create_and_upload_pdfs()\n \n \n #Add the url of the pdf to the outgoing data\n #only for when an acknowledgement is create\n try:\n ack = bundle.obj.acknowledgement_pdf\n production = bundle.obj.production_pdf\n bundle.data['pdf'] = {'acknowledgement': ack.generate_url(),\n 'production': production.generate_url()}\n except AttributeError: \n logger.warn('Missing acknowledgement or production pdf')\n \n #Conditionally email ack to Decoroom\n if \"decoroom\" in bundle.obj.customer.name.lower():\n try:\n logger.info(\"Emailing Decoroom Co., Ltd. 
the order details...\")\n bundle.obj.email_decoroom()\n except Exception as e:\n logger.error(\"Unable to mail decoroom.\")\n logger.error(e)\n \n \n \n logger.info(u\"Acknowledgement #{0} created for {1}\".format(bundle.obj.id, \n bundle.obj.customer.name)) \n return bundle", "def SendPayment(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def make_payment():\n\n response = VoiceResponse()\n if 'caller_name' not in session:\n session['caller_name'] = request.args.get(\n 'caller_name') or \"Twilio Payment\"\n if 'payment_amount' not in session:\n session['payment_amount'] = request.args.get('amount') or \"5000\"\n if 'card_number' not in session:\n response.redirect('/get_card_number')\n elif 'expiry' not in session:\n response.redirect('/get_expiry')\n elif 'cvv' not in session:\n response.redirect('/get_cvv')\n else:\n call_sid = request.form.get('CallSid')\n session['call_sid'] = call_sid\n response.redirect('/process_payment')\n\n return str(response)", "def create_account_payment(self, order, user):\n access_token = get_random_string(20)\n domain = SysConfig.get_config('DOMAIN')\n\n with transaction.atomic():\n payment_txn = Transaction.objects.create(gateway=self.gateway,\n order=order,\n description='Transaction for order #%s' % order.id,\n status=Transaction.STATUS_PROCESSING,\n currency=order.currency.code,\n amount=order.charge_amount,\n updated_by=unicode(user),\n created_by=unicode(user))\n payment_txn.add_param('access_token', access_token, user)\n payment_txn.save()\n\n try:\n payment = {\n 'intent': 'sale',\n 'redirect_urls': {\n 'return_url': 'http://%s%s' % (domain, reverse('payments_process_account_success',\n args=[payment_txn.id, access_token])),\n 'cancel_url': 'http://%s%s' % (domain, reverse('payments_process_account_cancel',\n args=[payment_txn.id, access_token])),\n },\n 'payer': {\n 'payment_method': 'paypal',\n },\n 'transactions': [{\n 'item_list': {\n 'items': [{\n 'name': item.product.name,\n 'sku': item.product.name,\n 'price': _exchange_amount(item.price, order.exchange_rate),\n 'currency': order.currency.code,\n 'quantity': item.quantity\n } for item in order.items.all()]\n },\n 'amount': {\n 'total': unicode(order.charge_amount),\n 'currency': order.currency.code,\n 'details': {\n 'subtotal': _exchange_amount(order.sub_total, order.exchange_rate),\n 'tax': _exchange_amount(order.taxes, order.exchange_rate),\n 'shipping': _exchange_amount(order.shipping_cost, order.exchange_rate)\n }\n },\n 'description': 'Payment for order #%s' % (order.id)\n }],\n }\n\n logger.info('Processing PayPal account.', extra=payment)\n payment = paypalrestsdk.Payment(payment, api=self.api)\n payment_created = payment.create()\n except Exception as e:\n logger.error('Failed to process PayPal account (transaction_id: %s)' % payment_txn.id)\n logger.exception(e)\n\n raise DoorstepError('We failed to process your PayPal account at the moment, please try again later!')\n\n if payment_created:\n with transaction.atomic():\n payment_txn.add_param('id', unicode(payment.id), user)\n payment_txn.add_param('create_time', unicode(payment.create_time), user)\n payment_txn.add_param('update_time', unicode(payment.update_time), user)\n payment_txn.add_param('state', unicode(payment.state), user)\n payment_txn.add_param('intent', unicode(payment.intent), user)\n payment_txn.add_param('payment_method', unicode(payment.payer.payment_method), user)\n 
payment_txn.save()\n\n for link in payment.links:\n if link.rel == 'approval_url' and link.method == 'REDIRECT':\n return link.href\n\n payment_txn.status = Transaction.STATUS_FAILED\n payment_txn.error_message = payment.error['message']\n payment_txn.save()\n\n raise DoorstepError('We failed to process your PayPal account at the moment, please try again later!')", "def _prepare_wsdl_objects(self):\r\n\r\n\t# Default behavior is to not request transit information\r\n\tself.ReturnTransitAndCommit = False\r\n\r\n # This is the primary data structure for processShipment requests.\r\n self.RequestedShipment = self.client.factory.create('RequestedShipment')\r\n self.RequestedShipment.ShipTimestamp = datetime.now()\r\n \r\n TotalWeight = self.client.factory.create('Weight')\r\n # Start at nothing.\r\n TotalWeight.Value = 0.0\r\n # Default to pounds.\r\n TotalWeight.Units = 'LB'\r\n # This is the total weight of the entire shipment. Shipments may\r\n # contain more than one package.\r\n self.RequestedShipment.TotalWeight = TotalWeight\r\n \r\n # This is the top level data structure for Shipper information.\r\n ShipperParty = self.client.factory.create('Party')\r\n ShipperParty.Address = self.client.factory.create('Address')\r\n ShipperParty.Contact = self.client.factory.create('Contact')\r\n \r\n # Link the ShipperParty to our master data structure.\r\n self.RequestedShipment.Shipper = ShipperParty\r\n\r\n # This is the top level data structure for Recipient information.\r\n RecipientParty = self.client.factory.create('Party')\r\n RecipientParty.Contact = self.client.factory.create('Contact')\r\n RecipientParty.Address = self.client.factory.create('Address')\r\n \r\n # Link the RecipientParty object to our master data structure.\r\n self.RequestedShipment.Recipient = RecipientParty\r\n \r\n Payor = self.client.factory.create('Payor')\r\n # Grab the account number from the FedexConfig object by default.\r\n Payor.AccountNumber = self._config_obj.account_number\r\n # Assume US.\r\n Payor.CountryCode = 'US'\r\n \r\n ShippingChargesPayment = self.client.factory.create('Payment')\r\n ShippingChargesPayment.Payor = Payor\r\n\r\n self.RequestedShipment.ShippingChargesPayment = ShippingChargesPayment\r\n \r\n # ACCOUNT or LIST\r\n self.RequestedShipment.RateRequestTypes = ['ACCOUNT'] \r\n \r\n # Start with no packages, user must add them.\r\n self.RequestedShipment.PackageCount = 0\r\n self.RequestedShipment.RequestedPackageLineItems = []\r\n \r\n # This is good to review if you'd like to see what the data structure\r\n # looks like.\r\n self.logger.debug(self.RequestedShipment)", "def input_payment_details(self):\n pass", "def post(self, args):\n with api.commit_or_abort(\n db.session,\n default_error_message=\"Failed to create a new Payment\"\n ):\n payment = Payment(**args)\n db.session.add(payment)\n return payment", "def __init__(self, payment_id, camper_id = '', camp_id = '', payment_date = '', paid_amount = ''):\r\n self.__payment_id__ = payment_id\r\n self.__camper_id__ = camper_id\r\n self.__camp_id__ = camp_id\r\n self.__payment_date__ = payment_date\r\n self.__paid_amount__ = paid_amount", "def create_order(self, serializer):\n data = serializer.validated_data\n service: Service = data['service']\n customer: Customer = Customer.objects.get_or_create(\n email=data['email'])[0]\n invoice: Invoice = Invoice(\n charged_amount=service.price.amount,\n currency=service.price.currency,\n timestamp=now(),\n customer=customer,\n service=service\n )\n invoice.save()\n 
serializer.validated_data['invoice_id'] = invoice.id\n serializer.save()\n\n self.send_order_email(invoice, serializer.instance)", "def _generate_transaction(\n payment: Payment,\n kind: str,\n amount: Decimal,\n *,\n id='',\n is_success=True,\n **data) -> Transaction:\n transaction = create_transaction(\n payment=payment,\n kind=kind,\n amount=amount,\n currency=data.pop('currency', payment.currency),\n gateway_response=data,\n token=id,\n is_success=is_success)\n return transaction", "def create(self, odometryType): # real signature unknown; restored from __doc__\n pass", "def __init__(self, paymentDict):\n self.createdTime = calendar.timegm(\n time.strptime(paymentDict['created_time'], '%Y-%m-%dT%XZ'))\n self.actor = paymentDict['actor']\n self.target = paymentDict['target']", "def post(self, payment_id=None):\n data = request.get_json()\n redirect_url = data.get('redirect_url')\n cart_token = data.get('cart_token')\n address_id = data.get('address_id')\n \n cart = Cart.query.filter_by(token=cart_token, user_id=current_user.id).first()\n if not cart:\n return {\"message\":\"No cart with this id\"}, 404\n\n if not address_id:\n return {\"message\": \"Please enter a address for your order\"}, 404\n\n order = Order.create_from_cart(cart_token, address_id)\n payment = Payment.query.filter_by(order_id=order.id).first()\n if not payment:\n payment = Payment(\n user_id=current_user.id, \n order_id=order.id, \n amount=order.total,\n status='Pending'\n )\n\n db.session.add(payment)\n db.session.commit()\n\n client = Client(current_app.config['ZARINPAL_WEBSERVICE'])\n mail = current_user._email\n\n if not mail:\n return {\"message\": \"Please enter your email address to continue the payment\"}\n\n user_info = UserAddress.query.filter_by(id=address_id).first()\n if user_info.phone:\n mobile = user_info.phone\n else:\n mobile = '' \n\n result = client.service.PaymentRequest(current_app.config['MERCHANT_ID'],\n payment.amount,\n 'nani',\n mail,\n mobile,\n redirect_url)\n\n payment.authority = result.Authority\n db.session.commit()\n if result.Status == 100:\n return {'payment_url':'https://www.zarinpal.com/pg/StartPay/' + result.Authority}\n else:\n return {\n 'message':\"We can't connect you to zarin pal server, right now. Please try again in a few moments.\"\n }, 404", "def no_payment_required(self):" ]
[ "0.71465117", "0.69827616", "0.6701402", "0.66829276", "0.66817296", "0.6674573", "0.6672829", "0.6573561", "0.640586", "0.64023817", "0.63429075", "0.6279454", "0.6221061", "0.6155795", "0.61184585", "0.61052555", "0.6012806", "0.5976966", "0.59646237", "0.5943552", "0.59364045", "0.59259623", "0.58807135", "0.5871875", "0.5862735", "0.58574754", "0.58526474", "0.58414966", "0.58339685", "0.5816768" ]
0.7246261
0
Determine the frequencies and normalized eigenvectors of a matrix
def freq_and_modes(matrix, k_over_m):
    val, vect = np.linalg.eigh(matrix)  # for different masses, use eig with no h
    freq = np.sqrt(val)
    return freq, vect
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eigen(M):\n values, vectors = np.linalg.eig(M)\n return values, vectors", "def get_eigenvectors(self):\n return self.eigenVectors", "def GetEigenvectors(self):\n\t\treturn self.Solver.GetEigenvectors()", "def get_eigen_values_and_vectors(matrix, num_values):\n (w, v) = eigen_decomp(matrix)\n eigen_values = []\n eigen_vectors = []\n ### YOUR CODE HERE\n max_indexs=np.argpartition(w, -num_values)\n max_indexs=max_indexs[-num_values:]\n ids=np.argsort(w[max_indexs])\n sort_index=max_indexs[ids]\n eigen_values=w[sort_index]\n eigen_vectors=v[:,sort_index]\n ### END YOUR CODE\n return eigen_values, eigen_vectors", "def numpy_eigenvectors(A):\n import numpy\n A = numpy.array(A)\n E, V = numpy.linalg.eigenvectors(A)\n import Numeric\n E = Numeric.array(E)\n V = Numeric.array(V)\n return E, V", "def solve_for_eigenvectors(matrix, num, mode=\"general\"):\n\n # Construct a sparse matrix\n if mode == \"general\":\n return linalg.eigs(matrix, num)\n\n if mode == \"symmetric\":\n return linalg.eigsh(matrix, num)", "def get_eigenvectors(self):\n return self._eigenvectors", "def calc_eigenvectors(verts: [[float]]):\n A = np.zeros((3, len(verts)))\n\n A[0] = np.array([x[0] for x in verts]) # First row is all the X_coords\n A[1] = np.array([x[1] for x in verts]) # second row is all the Y_coords\n A[2] = np.array([x[2] for x in verts]) # third row is all the z-coords\n \n A_cov = np.cov(A) # This is returns a 3x3\n eigenvalues, eigenvectors = np.linalg.eigh(A_cov)\n\n return eigenvalues, eigenvectors", "def eig(self,manifold_num):\n num_sites = len(self.energies[manifold_num])\n ham = self.manifold_hamiltonian(manifold_num).toarray()\n eigvals, eigvecs = eigh(ham)\n # Force degenerate eigenvectors to be orthogonal\n if self.qr_flag:\n eigvecs, r = np.linalg.qr(eigvecs,mode='reduced')\n if self.check_eigenvectors:\n HV = ham.dot(eigvecs)\n D = eigvecs.T.dot(HV)\n if np.allclose(D,np.diag(eigvals),rtol=1E-11,atol=1E-11):\n pass\n else:\n # warnings.warn('Eigenvalues altered by QR factorization, max absolute change in diagonal matrix of {}'.format(np.max(D-np.diag(eigvals))))\n warnings.warn('Using eigenvectors to diagonalize hamiltonian does not result in the expected diagonal matrix to tolerance, largest deviation is {}'.format(np.max(np.abs(D - np.diag(eigvals)))))\n \n sort_indices = eigvals.argsort()\n eigvals.sort()\n eigvecs = eigvecs[:,sort_indices]\n if self.qr_flag:\n r = r[:,sort_indices]\n self.r_mats.append(r)\n # I choose to pick the phase of my eigenvectors such that the state which has the\n # largest overlap has a positive overlap. 
For sufficiently small d, and alpha close\n # to 1, this will be the overlap between the same excited and ground states.\n for i in range(eigvals.size):\n max_index = np.argmax(np.abs(eigvecs[:,i]))\n if eigvecs[max_index,i] < 0:\n eigvecs[:,i] *= -1\n\n return eigvals, eigvecs", "def posdef_eig_svd(mat):\n evals, evecs, _ = tf.svd(mat)\n\n return evals, evecs", "def eigensystem(mat):\n e, v = numpy.linalg.eig(mat)\n\n # `eig` returns complex results but we know all of the\n # eigenstates have real energy.\n e = numpy.real(e)\n\n items = zip(e, v.T)\n items = sorted(items, key = operator.itemgetter(0))\n e, v = zip(*items)\n\n return (e, v)", "def eigen_decomposition(X, features):\n # Center to average\n Xctr = X - X.mean(0)\n # covariance matrix\n Xcov = np.cov(Xctr.T)\n\n # Compute eigenvalues and eigenvectors\n eigen_values, eigen_vectors = sp.linalg.eigh(Xcov)\n\n # Sort the eigenvalues and the eigenvectors descending\n sortedindex = np.argsort(eigen_values)[::-1]\n eigen_values = eigen_values[sortedindex]\n eigen_vectors = eigen_vectors[:, sortedindex]\n\n ###########\n y_pos = np.arange(len(features))\n weight = eigen_vectors[0]\n\n figure, axis = plt.subplots(2, 1)\n\n axis[0].bar(features, eigen_vectors[0])\n plt.setp(axis[0], title=\"First and Second Component's Eigenvectors \", ylabel='Weight')\n axis[0].set_xticks(features, features)\n axis[1].bar(features, eigen_vectors[1])\n axis[1].set_xticks(features, features)\n plt.setp(axis[1], ylabel='Weight')\n # axis[0].bar(y_pos, weight, align='center', alpha=0.5)\n # axis[0].xticks(y_pos, features)\n # axis[0].ylabel('Weight')\n # axis[0].title('Features')\n #\n # axis[1].bar(y_pos, weight, align='center', alpha=0.5)\n # axis[1].xticks(y_pos, features)\n # axis[1].ylabel('Weight')\n # axis[1].title('Features')\n\n plt.show()\n # return eigen_values, eigen_vectors", "def calculate_eigenvalues(H):\n eigenvalues, eigenvectors = np.linalg.eigh(H)\n return eigenvalues, eigenvectors", "def eig_faces(u_mat, nmode, dim):\n n = int(nmode)\n nparray = np.zeros(np.size(u_mat[:,0]))\n for i in range(n):\n nparray = nparray + u_mat[:,i]\n \n nparray = np.reshape(nparray,dim)\n return(nparray)", "def get_eigen_values_and_vectors(matrix, num_values):\n w, v = eigen_decomp(matrix)\n eigen_values = []\n eigen_vectors = []\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n return eigen_values, eigen_vectors", "def eigen_decomp(matrix):\n w = None\n v = None\n ### YOUR CODE HERE\n w,v=np.linalg.eig(matrix)\n ### END YOUR CODE\n return w, v", "def eig_vals_vects_hermitian(matrix, sort='imag'):\n # if len(matrix) < 10:\n # print '\\nFinding eigvals, matrix = ', matrix\n eigval, eigvect = np.linalg.eig(matrix)\n # use imaginary part to get ascending order of eigvals\n if sort == 'imag':\n si = np.argsort(np.imag(eigval))\n elif sort == 'real':\n si = np.argsort(np.real(eigval))\n else:\n si = np.arange(len(eigval))\n\n eigvect = np.array(eigvect)\n eigvect_out = eigvect.T[si]\n eigval_out = eigval[si]\n if len(eigval_out) < 10:\n print 'eigvals return as =', eigval_out\n return eigval_out, eigvect_out", "def eig_vals_vects(self, matrix=None, attribute=False, attribute_matrix=False):\n if matrix is None:\n matrix = self.get_matrix(attribute=attribute_matrix)\n eigval, eigvect = np.linalg.eig(matrix)\n si = np.argsort(np.imag(eigval))\n eigvect = np.array(eigvect)\n eigvect = eigvect.T[si]\n eigval = eigval[si]\n\n if attribute:\n self.eigvect = eigvect\n self.eigval = eigval\n\n # print 'np.shape(eigvect) = ', np.shape(eigvect)\n # sys.exit()\n return 
eigval, eigvect", "def eig_vals_vects(matrix, sort='imag', not_hermitian=True, verbose=False):\n # if len(matrix) < 10:\n # print '\\nFinding eigvals, matrix = ', matrix\n\n # check if hermitian:\n if not_hermitian:\n eigval, eigvect = np.linalg.eig(matrix)\n else:\n if (matrix == matrix.conj().T).all():\n if verbose:\n print 'Shortcut eigvect/vals since matrix is hermitian...'\n eigval, eigvect = np.linalg.eigh(matrix)\n else:\n if verbose:\n print 'matrix is not hermitian...'\n eigval, eigvect = np.linalg.eig(matrix)\n\n # use imaginary part to get ascending order of eigvals\n if sort == 'imag':\n si = np.argsort(np.imag(eigval))\n elif sort == 'real':\n si = np.argsort(np.real(eigval))\n else:\n si = np.arange(len(eigval))\n\n eigvect = np.array(eigvect)\n eigvect_out = eigvect.T[si]\n eigval_out = eigval[si]\n\n # if len(eigval_out) < 10:\n # print 'eigvals return as =', eigval_out\n\n return eigval_out, eigvect_out", "def eigenvects(mat):\n # Check if symbols are present\n if hasSymbols(mat):\n return mat.eigenvects()\n # Purely numeric matrix\n newMat = recursiveEvaluate(mat.as_mutable())\n return newMat.eigenvects()", "def test_svd(self):\n eigenvectors, eigenvalues = self.svd.run(self.test_matrix)\n\n self.assertEqual(eigenvectors.shape, (100, 100))\n self.assertEqual(eigenvalues.shape, (100,))", "def calculate_eigenvalues(self):\n self.__eigenvalues = []\n dictionary = np.linalg.eig(np.array(self.__A))\n indicator = True\n sum1 = 0\n for i in range(self.__A.shape[0]):\n if all(self.__A[i, j] == 0 for j in range(self.__A.shape[1])):\n indicator = all(self.__B[i,j] for j in range(self.__B.shape[1]))\n if (indicator):\n sum1 += 1\n \n for val in dictionary[0]:\n if (val != 0):\n self.__eigenvalues.append(complex(val))\n elif (indicator) and (sum1 > 0):\n sum1 -= 1\n self.__eigenvalues.append(complex(val))", "def compute_spectrum(P):\r\n evals, lvecs= linalg.eig(P,right=False, left=True)\r\n\r\n lvecs = lvecs/lvecs.sum(axis=0, keepdims=True)\r\n \r\n return evals, lvecs", "def right_eigenvectors(matrix, nvals=None):\n matrix = np.asarray(matrix)\n return _eigenvectors(matrix, nvals)", "def project_to_eigenvectors(X, vecs):\n\n return (X-np.mean(X, axis=0)).dot(np.transpose(vecs)) #PCA assumes that the data is centered, so we need to do that before doing the calculations", "def getPCA(matrix):\n eVal, eVec = np.linalg.eigh(matrix)\n indices = eVal.argsort()[::-1] # arguments for sorting eVal desc\n eVal, eVec = eVal[indices], eVec[:, indices]\n eVal = np.diagflat(eVal)\n return eVal, eVec", "def fit_evd(self):\n\n # EVD only work on square matrices as we need to compute the eigenvalues and eigenvectors\n # For this we compute the covariance matrix K\n # K should be n x n matrix (pixels x pixels)\n\n # The covariance matrix is nxn\n self.cov_matrix = np.zeros(shape=[self.n_features, self.n_features], dtype='uint8')\n\n self.cov_matrix = np.cov(self.norm_matrix, rowvar=False)\n # C is a symmetric matrix and so it can be diagonalized:\n eig_val, eig_vec = linalg.eig(self.cov_matrix)\n\n # Sorting the eigenvectors by decreasing eigenvalues\n # [Start : stop : stepcount] stepcount is reversed\n idx = eig_val.argsort()[::-1]\n eig_val, eig_vec = eig_val[idx], eig_vec[:, idx]\n\n # Explained_variance tell us how much of the variance in the data each eigen value explains\n explained_variance = eig_val / (self.n_samples - 1)\n # total_var is the total variance in the data\n total_var = explained_variance.sum()\n explained_variance_ratio = explained_variance / total_var\n # The cumulative sum of 
all ratios\n ratio_cumsum = np.cumsum(explained_variance_ratio)\n\n # We search in the cumsum for the index of the value which, when added, corresponds to the quality_percent\n # The index of the cumsum gives us the components we need to add to explain X quality percent of our data\n n_components = np.searchsorted(ratio_cumsum, self.quality_percent, side='right') + 1\n\n self.components = eig_vec[:n_components]\n print(\"The principal components have been calculated using eigendecomposition\", self.components.shape)\n\n return self.components", "def explained_variance_ratio(X, eigenvectors, eigenvalues):\n return np.sum(eigenvalues)/total_variance(X)", "def decomposition_method(matrix):\n x, y, z = 0, 1, 2 # indices\n K = np.array([\n [R[x, x]-R[y, y]-R[z, z], R[y, x]+R[x, y], R[z, x]+R[x, z], R[y, z]-R[z, y]],\n [R[y, x]+R[x, y], R[y, y]-R[x, x]-R[z, z], R[z, y]+R[y, z], R[z, x]-R[x, z]],\n [R[z, x]+R[x, z], R[z, y]+R[y, z], R[z, z]-R[x, x]-R[y, y], R[x, y]-R[y, x]],\n [R[y, z]-R[z, y], R[z, x]-R[x, z], R[x, y]-R[y, x], R[x, x]+R[y, y]+R[z, z]]\n ])\n K = K / 3.0\n\n e_vals, e_vecs = np.linalg.eig(K)\n print('Eigenvalues:', e_vals)\n print('Eigenvectors:', e_vecs)\n max_index = np.argmax(e_vals)\n principal_component = e_vecs[max_index]\n return principal_component", "def left_eigenvectors(matrix, nvals=None):\n matrix = np.asarray(matrix)\n return _eigenvectors(matrix.transpose(), nvals)" ]
[ "0.6904609", "0.6858", "0.6851799", "0.6815563", "0.68081987", "0.6753682", "0.66808695", "0.6597894", "0.6562375", "0.6542027", "0.65396744", "0.6537718", "0.65263975", "0.6518386", "0.6516741", "0.65053654", "0.6428052", "0.6415041", "0.63126284", "0.627554", "0.6254286", "0.6214199", "0.6214175", "0.62126297", "0.62003195", "0.6162638", "0.6130043", "0.6096532", "0.60640913", "0.6058891" ]
0.69194525
0
Function to make a plot of motion of masses as a function of time. The time should be on the vertical axis and the position on the horizontal axis.
def plot_motion_of_masses(x, time, title='bad title'): # Nothing special about these, but they look nice x1_equilibrium_pos = 3 x2_equilibrium_pos = 6 x1 = x[:, 0] + x1_equilibrium_pos x2 = x[:, 1] + x2_equilibrium_pos plt.plot(x1, time, label='Mass 1') plt.plot(x2, time, label='Mass 2') plt.xlabel("Position") plt.ylabel("Time") plt.xlim(0, 9) plt.legend() plt.title(title)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_motion(x, y):\n plt.xlabel(\"X Position (m)\")\n plt.ylabel(\"Y Position (m)\")\n plt.plot(x, y)\n plt.show()", "def plot_x(t, x):\n plt.figure()\n plt.plot(t, x)\n plt.title(\"Vertical position of the skydiver as a function of time\")\n plt.xlabel(\"Time t [s]\")\n plt.ylabel(\"Height [m]\")\n plt.savefig('Parachute_position.png')", "def plot_ins_state(time, state):\n pylab.ion()\n\n plot_trajectory(state[:,0], state[:,1], state[:,2])\n\n\n # Plot position vs. time\n\n\n pylab.figure()\n pylab.subplot(311)\n pylab.plot(time, state[:,0],'r')\n pylab.xlabel('time (s)')\n pylab.ylabel('$\\\\phi$, rad')\n pylab.title('Latitude')\n pylab.grid(True)\n\n pylab.subplot(312)\n pylab.plot(time, state[:,1],'g')\n pylab.xlabel('time (s)')\n pylab.ylabel('$\\\\lambda$, rad')\n pylab.title('Longitude')\n pylab.grid(True)\n\n pylab.subplot(313)\n pylab.plot(time, state[:,2],'b')\n pylab.xlabel('time, s')\n pylab.ylabel('$h$, m')\n pylab.title('Altitude')\n pylab.grid(True)\n pylab.show()\n\n\n # Plot velocity vs. time\n pylab.figure()\n pylab.plot(time, state[:,3:6])\n pylab.xlabel('time, s')\n pylab.ylabel('Vn, Ve, Vd')\n pylab.title('Velocity vs. time')\n\n pylab.grid(True)\n pylab.show()\n\n # Plot acceleration vs. time\n pylab.figure()\n pylab.plot(time, state[:,6:9])\n pylab.xlabel('time, s')\n pylab.ylabel('an, ae, ad')\n pylab.title('Acceleration vs. time')\n\n pylab.grid(True)\n pylab.show()\n pylab.ioff()\n\n # Plot quaternions vs. time\n pylab.figure()\n pylab.plot(time, state[:,9:])\n pylab.xlabel('time, s')\n pylab.ylabel('q0, q1, q2, q3')\n pylab.title('Quaternion vs. time')\n\n pylab.grid(True)\n pylab.show()\n pylab.ioff()", "def plot_muscle_activations(side,t_start,t_stop):\n \n time, ankle_l_trajectory, ankle_r_trajectory,foot_l_contact,foot_r_contact,muscle_lh_activations, muscle_rh_activations,muscle_lh_forces,muscle_rh_forces,joint_lh_positions,joint_rh_positions = load_data()\n\n index_start = np.where(time == t_start)[0][0]\n index_end = np.where(time == t_stop)[0][0]\n \n time = time[index_start:index_end+1]\n muscle_rh_activations = muscle_rh_activations[index_start:index_end+1,:]\n muscle_lh_activations = muscle_lh_activations[index_start:index_end+1,:]\n \n #time=np.linspace(1,len(ankle_l_trajectory[:,0]),len(ankle_l_trajectory[:,0]));\n if side =='right':\n muscle_activations = muscle_rh_activations\n elif side == 'left':\n muscle_activations = muscle_lh_activations \n else:\n return\n \n plt.figure('Muscle activations')\n plt.subplot(421)\n plt.plot(time,muscle_activations[:,0])\n plt.title('Muscle PMA')\n #plt.xlabel('Time [s]')\n plt.ylabel('Muscle activation')\n\n plt.subplot(422)\n plt.plot(time,muscle_activations[:,1])\n plt.title('Muscle CF')\n #plt.xlabel('Time [s]')\n #plt.ylabel('Muscle activation') \n\n plt.subplot(423) \n plt.plot(time,muscle_activations[:,2])\n plt.title('Muscle SM')\n #plt.xlabel('Time [s]')\n plt.ylabel('Muscle activation')\n \n plt.subplot(424) \n plt.plot(time,muscle_activations[:,3])\n plt.title('Muscle POP')\n #plt.xlabel('Time [s]')\n #plt.ylabel('Muscle activation') \n \n plt.subplot(425) \n plt.plot(time,muscle_activations[:,4])\n plt.title('Muscle RF')\n# plt.xlabel('Time [s]')\n plt.ylabel('Muscle activation') \n\n plt.subplot(426) \n plt.plot(time,muscle_activations[:,5])\n plt.title('Muscle TA')\n# plt.xlabel('Time [s]')\n #plt.ylabel('Muscle activation') \n \n plt.subplot(427) \n plt.plot(time,muscle_activations[:,6])\n plt.title('Muscle SOL')\n plt.xlabel('Time [s]')\n plt.ylabel('Muscle activation') \n \n 
plt.subplot(428) \n plt.plot(time,muscle_activations[:,7])\n plt.title('Muscle LG')\n plt.xlabel('Time [s]')\n \n plt.subplots_adjust(hspace=0.5)\n #plt.ylabel('Muscle activation') \n# plt.suptitle('Decomposition of the trajectories of the hind feet')\n \n# plt.suptitle('Muscle activations of the '+ side + ' limb')\n plt.show()\n return", "def plot_v(t, v):\n p1 = plt.plot(t,v)\n plt.xlabel('Time [s]')\n plt.ylabel('Velocity [m/s]')\n plt.title('Velocity for the skydiver as a function of time')\n plt.show()\n plt.savefig('Parachute_velocity.png')", "def plotTime(self):\n plt.figure()\n t = [i for i in range(len(self.nodes_infected))]\n print(t)\n plt.title('Nodos infectados vs Tiempo')\n plt.xlabel('Instantes de tiempo')\n plt.ylabel('# de nodos infectados')\n plt.plot(t, self.nodes_infected)\n plt.grid(True)\n plt.show()", "def mlmc_massplot():\n out = parse_output_allg([\"t(\", \"Energy\", \"Mass\", \"InFlowRate\", \"OutFlowRate\"])\n t = out[0]\n mass = out[2]\n inflow = out[3]\n dt_3 = 64\n ndt = [dt_3, 2 * dt_3, 4 * dt_3]\n l = [3, 4, 5]\n nl = [10, 0, 0]\n n = 0\n colors = [\"darkred\", \"red\", \"#ff9966\"]\n mconly = True\n\n for i in range(len(l)):\n fig = plt.figure(i)\n for j in range(nl[i]):\n plt.plot(t[n:n + ndt[i] + 1], mass[n:n + ndt[i] + 1], label=\"Level \" + str(l[i]) + \" Sample \" + str(j))\n if (j == -1):\n plt.plot(t[n:n + ndt[i] + 1], inflow[n:n + ndt[i] + 1], color=\"black\",\n label=\"Inflow Level \" + str(l[i]) + \" Sample \" + str(j))\n n += ndt[i] + 1\n if i > 0 and not mconly:\n plt.plot(t[n:n + ndt[i - 1] + 1], mass[n:n + ndt[i - 1] + 1],\n label=\"Level \" + str(l[i - 1]) + \" Sample \" + str(j))\n n += ndt[i - 1] + 1\n plt.legend()\n\n plt.savefig(\"MLMCMassPlot.jpg\")", "def plot_forces_parachute(t, v, dt, tp, m, a_first, a_last):\n plt.figure()\n drag = zeros(len(v))\n for i in range(len(v)):\n if(i*dt <= tp):\n drag[i] = -m*a_first*abs(v[i])*v[i]\n else:\n drag[i] = -m*a_last*abs(v[i])*v[i]\n grav = [-m*9.81]*len(v)\n Boyancy = [1. 
* 9.81 * 0.1]*len(v) # rho * g * V\n Fsum = drag+grav+Boyancy\n plt.plot(t, drag, t, grav, t, Boyancy, t, Fsum)\n plt.legend([\"Drag force\", \"Gravity force\", \"Boyancy\", \"Sum of forces\"])\n plt.savefig('Parachute_forces.png')", "def plot_msd(msd, h_exp):\n fig, ax = plt.subplots(1, 2, figsize = (10, 10))\n av_msd = np.mean(msd, axis = 0)\n\n for p in np.arange(0, msd.shape[0], step = 1):\n for t in np.arange(0, msd.shape[1], step = 1): \n ax[0].plot(t, msd[p, t], 'bx')\n ax[1].plot(t, av_msd[t], 'ro')\n ax[0].set_xlabel('Time lag (number of steps)')\n ax[0].set_ylabel('MSD (pix^2)')\n ax[0].set_title('Individual TAMSDs: H = ' + str(h_exp))\n ax[1].set_xlabel('Time lag (number of steps)')\n ax[1].set_ylabel('MSD (pix^2)')\n ax[1].set_title('Averaged TAMSDs: H = ' + str(h_exp)) \n ax[0].set_xlim([0, np.max(t)])\n ax[1].set_xlim([0, np.max(t)])\n ax[0].set_ylim([0, np.max(msd)]) \n ax[1].set_ylim([0, np.max(av_msd)])", "def plot_time(self, X, x0, t):\n\n Pressure = [Solution(self, (x-x0)/t).pressure for x in X]\n Velocity = [Solution(self, (x-x0)/t).velocity for x in X]\n Density = [Solution(self, (x-x0)/t).rho for x in X]\n\n fig, axs = plt.subplots(3, sharex=True)\n fig.suptitle(\"Solution of the Riemann problem\\nat t = {}s\".format(t))\n axs[0].plot(X, Density)\n axs[1].plot(X, Velocity)\n axs[2].plot(X, Pressure)\n\n axs[0].grid()\n axs[0].set(ylabel = \"Density\")\n axs[1].grid()\n axs[1].set(ylabel = \"Velocity\")\n axs[2].grid()\n axs[2].set(ylabel = \"Pressure\")\n\n plt.xlabel(\"Location x\")", "def plot(axes, axis, values, c='chartreuse'):\n a = axes[axis]\n a.set_xlabel('time (s)')\n x = np.array(range(len(values))) / 1000\n dim = 'x' if axis == 0 else 'y' if axis == 1 else 'z'\n a.set_title('-'.join([dim, 'acceleration']))\n a.plot(x, values / 1000, c=c)", "def one_period_plot():\n file = \"Data/matfiles/20131221.mat\"\n object = MatReader(file)\n\n NeA = object.NeA\n latA = object.latA\n times = object.secondsA\n mlt = object.mltA\n ind1 = 2606 #lat inds\n ind2 = 13940 #lat inds\n \n ind1 = 3197 #mlat inds\n ind2 = 14390 #mlat inds\n \n T = ind2 - ind1\n ind1 += int(T/2)\n ind2 += int(T/2)\n\n latA = latA[ind1:ind2]\n NeA = NeA[ind1:ind2]\n # NeA = object.meanie(NeA, 5)\n times = times[ind1:ind2]\n mlt = mlt[ind1:ind2]\n mlt = hour_round(mlt)\n\n lats = np.zeros_like(latA)\n lats[0] = latA[0]\n for i in range(len(latA)-1):\n dlat = latA[i+1] - latA[i]\n if dlat < 0:\n lats[i+1] = lats[i] - dlat\n else:\n lats[i+1] = lats[i] + dlat\n\n lats += 90\n\n xticks = np.array([-90, -70, -30, 30, 70, 110, 150, 210, 250, 270]) + 90\n gridticks = np.array([-90, -70, -30, 30, 70, 77, 103, 110, 150, 210, 250, 270]) + 90\n # plt.plot(lats, NeA, \".\", markersize = 1)\n # plt.plot([0, 0], [0, np.max(NeA)], \"k\")\n # plt.plot([30, 30], [0, np.max(NeA)], \"k\")\n # plt.plot([60, 60], [0, np.max(NeA)], \"k\")\n # plt.plot([120, 120],[0, np.max(NeA)], \"k\")\n # plt.plot([150, 150], [0, np.max(NeA)], \"k\")\n # plt.plot([167, 167], [0, np.max(NeA)], \"k\")\n # plt.plot([193, 193], [0, np.max(NeA)], \"k\")\n # plt.plot([210, 210], [0, np.max(NeA)], \"k\")\n # plt.plot([240, 244], [0, np.max(NeA)], \"k\")\n # plt.plot([300, 300], [0, np.max(NeA)], \"k\")\n # plt.plot([330, 330], [0, np.max(NeA)], \"k\")\n # plt.plot([360, 360], [0, np.max(NeA)], \"k\")\n # plt.xticks(xticks)\n # plt.xlabel(\"Geomagnetic latitude going from 0 to 360 degrees, starting and ending at south pole\")\n # plt.ylabel(\"Electron density [cm$^{-1}$]\")\n # plt.title(\"One SWARM satellite period\")\n # plt.grid(\"on\", 
axis = \"x\", xdata = gridticks)\n #adding letters\n x = (gridticks[:-1] + gridticks[1:])/2 - 3\n y = np.zeros_like(x) - np.max(NeA)/40\n s = [\"S\", \"B\", \"A\", \"B\", \"C\", \"D\", \"C\", \"B\", \"A\", \"B\", \"S\"]\n # for i in range(len(x)):\n # plt.text(x[i], y[i], s[i], fontsize = 10)\n # plt.savefig(\"Figures/swarm_period.pdf\")\n # plt.show()\n\n # plt.plot(times, latA)\n # plt.plot(times, mlt)\n # plt.show()\n print(lats[0])\n print(lats[-1])\n \n fig, ax = plt.subplots()\n ax.plot(lats, NeA, \".\", markersize = 1)\n ax.set_xticks(xticks, minor=False)\n ax.set_xticks([167, 193], minor=True)\n ax.xaxis.grid(True, which = \"major\")\n ax.xaxis.grid(True, which = \"minor\")\n for i in range(len(x)):\n ax.text(x[i], y[i], s[i], fontsize = 10)\n ax.set_xlabel(\"Geomagnetic latitude going from 0 to 360 degrees, starting and ending at south pole\")\n ax.set_ylabel(\"Electron density [cm$^{-1}$]\")\n ax.set_title(\"One Swarm satellite period\")\n # plt.savefig(\"Figures/swarm_period.pdf\")\n plt.show()\n plt.plot(mlt, NeA)\n plt.show()\n plt.plot(mlt, lats)\n plt.show()", "def position(t, x, y):\n return x * exp(-t * y) * sin(2 * pi * t)", "def plot_matrix_method(pulse, trap, ToP):\n n0, d = trap.matrix_method(pulse)\n for k in range(len(d)):\n ave_list = []\n timestep = np.arange(0, trap.N+1, 1)\n for i in range(len(d[k])):\n sum2 = 0\n for j in range(len(d[k][i])):\n sum2 += (j) * d[k][i][j]\n ave_list.append(sum2)\n if ToP == 'a':\n plt.plot(timestep * pulse.t * 1e3, ave_list, label = pulse.t)\n if ToP == 'b':\n plt.plot(timestep * pulse.t * 1e3, ave_list, color = 'black', label = 'Matrix')\n if ToP == 'c':\n plt.plot(timestep * pulse.t * 1e3, ave_list, color = 'b')\n # plt.legend()\n # plt.xlabel('time (ms)')\n # plt.ylabel('n')\n #plt.xlim(0, 10) ", "def trajectory1(self):\r\n\r\n trackt = [] # particle trajectory,\r\n trackx = [] # particle trajectory\r\n an = [] # analitical s**2 + x**2 = t**2\r\n s1 = [] # s = 10; s = 0, light\r\n s2 = [] # s = 20;\r\n s3 = [] # s = 40;\r\n for i in range(0, len(self.dt.obs.obt_g)):\r\n trackt.append(float(i))\r\n trackx.append(self.dt.x[i])\r\n an.append(math.sqrt(float(i) ** 2 + self.dt.x[i] ** 2))\r\n s1.append(math.sqrt(1.0 ** 2 + self.dt.x[i] ** 2))\r\n s2.append(math.sqrt(2.0 ** 2 + self.dt.x[i] ** 2))\r\n s3.append(math.sqrt(4.0 ** 2 + self.dt.x[i] ** 2))\r\n\r\n # plots:\r\n\r\n (fig, ax) = plt.subplots() # figsize=(7,5)\r\n\r\n # trajectory\r\n\r\n ax.plot(\r\n trackx,\r\n trackt,\r\n marker='+',\r\n linewidth=1,\r\n linestyle='-',\r\n color='green',\r\n label='treck',\r\n )\r\n\r\n # measurement t\r\n # ax.plot(self.dt.x, self.dt.t, marker=\"+\", linestyle=\" \", color=\"blue\", label=\"result of measurement\")\r\n\r\n ax.plot(\r\n self.dt.x,\r\n self.dt.t,\r\n marker='o',\r\n linestyle=' ',\r\n color='black',\r\n label='result of measurement',\r\n )\r\n\r\n # analitical t\r\n\r\n ax.plot(self.dt.x, an, linestyle='-', color='red',\r\n label='continuum')\r\n\r\n # light trajectory\r\n\r\n ax.plot(trackx, trackx, linestyle='-', color='yellow',\r\n label='s=0 (light)')\r\n\r\n # s(x) curves\r\n\r\n ax.plot(\r\n trackx,\r\n s1,\r\n linestyle=':',\r\n linewidth=1,\r\n color='k',\r\n label='s=1.0',\r\n )\r\n ax.plot(\r\n trackx,\r\n s2,\r\n linestyle=':',\r\n linewidth=1,\r\n color='k',\r\n label='s=2.0',\r\n )\r\n ax.plot(\r\n trackx,\r\n s3,\r\n linestyle=':',\r\n linewidth=1,\r\n color='k',\r\n label='s=4.0',\r\n )\r\n\r\n # error of measurement t\r\n\r\n ax.errorbar(self.dt.x, self.dt.t, fmt='k ', yerr=self.dt.t_err)\r\n\r\n # signature 
on the horizontal x-axis\r\n\r\n ax.set_xlabel('x in metres')\r\n xm = -1.0\r\n for i in range(len(self.dt.x)):\r\n if self.dt.x[i] > xm:\r\n xm = self.dt.x[i]\r\n stepx = round(xm / float(len(self.dt.x)), 1)\r\n xm = round(xm + stepx, 1)\r\n ax.set_xlim([0.0, xm])\r\n\r\n # signature on vertical y axis\r\n\r\n ax.set_ylabel('t in metres of light time ')\r\n ym = -1.0\r\n for i in range(len(self.dt.t)):\r\n if self.dt.t[i] > ym:\r\n ym = self.dt.t[i]\r\n stepy = round(ym / float(len(self.dt.t)), 1)\r\n ym = round(ym + stepy, 1)\r\n ax.set_ylim([0.0, ym])\r\n\r\n # Create an instance of the class that will be responsible for the location of the labels (base is step on x)\r\n\r\n locatorx = matplotlib.ticker.MultipleLocator(base=stepx)\r\n\r\n # Set the locator for the main labels\r\n\r\n ax.xaxis.set_major_locator(locatorx)\r\n\r\n # Create an instance of the class that will be responsible for the location of the labels (base is step on y)\r\n\r\n locatory = matplotlib.ticker.MultipleLocator(base=stepy)\r\n\r\n # Set the locator for the main labels\r\n\r\n ax.yaxis.set_major_locator(locatory)\r\n\r\n ax.grid()\r\n\r\n # show legend\r\n\r\n ax.legend(loc='upper left')\r\n\r\n # show drawing\r\n\r\n plt.show()", "def plot_avar(time, sigma):\n pylab.figure()\n pylab.loglog(time, sigma,'-o')\n pylab.xlabel('$time (s)$')\n pylab.ylabel('$\\sigma(\\\\tau)$')\n pylab.title('Allan deviation')\n pylab.grid(True)\n pylab.show()", "def plot_motion(motion: Motion, **kwargs: Any) -> mpl.figure.Figure:\n return plot_path(motion.get_path(), **kwargs)", "def plot_trajectories_XYZ(t_start,t_stop):\n \n time, ankle_l_trajectory, ankle_r_trajectory,foot_l_contact,foot_r_contact,muscle_lh_activations, muscle_rh_activations,muscle_lh_forces,muscle_rh_forces,joint_lh_positions,joint_rh_positions = load_data()\n \n index_start = np.where(time == t_start)[0][0]\n index_end = np.where(time == t_stop)[0][0]\n \n time = time[index_start:index_end+1]\n ankle_l_trajectory = ankle_l_trajectory[index_start:index_end+1,:]\n ankle_r_trajectory = ankle_r_trajectory[index_start:index_end+1,:]\n \n #time=np.linspace(1,len(ankle_l_trajectory[:,0]),len(ankle_l_trajectory[:,0]));\n \n plt.figure('Trajectories')\n plt.subplot(311)\n plt.plot(time,ankle_l_trajectory[:,0])\n plt.plot(time,ankle_r_trajectory[:,0])\n #plt.title('Trajectory of the X component')\n plt.xlabel('Time [s]')\n plt.ylabel('X Position [cm]')\n plt.legend(['Left ankle','Right ankle'],loc='upper right')\n \n plt.subplot(312)\n plt.plot(time,ankle_l_trajectory[:,1])\n plt.plot(time,ankle_r_trajectory[:,1])\n #plt.title('Trajectory of the Y component')\n plt.xlabel('Time [s]')\n plt.ylabel('Y Position [cm]')\n plt.legend(['Left ankle','Right ankle'],loc='upper right')\n \n plt.subplot(313)\n plt.plot(time,ankle_l_trajectory[:,2])\n plt.plot(time,ankle_r_trajectory[:,2])\n #plt.title('Trajectory of the Z component')\n plt.xlabel('Time [s]')\n plt.ylabel('Z Position [cm]')\n plt.legend(['Left ankle','Right ankle'],loc='upper right')\n \n# plt.suptitle('Decomposition of the trajectories of the hind feet')\n return", "def animate_system(positions: pd.DataFrame,\n steps: int = 10,\n size: int = 100\n ) -> int:\n\n velocities = pd.DataFrame({'x': [0]*len(positions),\n 'y': [0]*len(positions),\n 'z': [0]*len(positions)})\n\n # Create empty plot\n fig = plt.figure(figsize=(7, 7))\n ax = fig.add_subplot(111, projection='3d')\n x, y, z = [], [], []\n colors = cm.copper(4)\n sc = ax.scatter(x, y, z, s=size)\n\n # Compute all positions\n step_pos = []\n\n for i in 
range(steps):\n\n # Apply gravity\n velocities = update_velocity(positions, velocities)\n\n # Apply velocity\n positions = positions + velocities\n\n step_pos.append(positions)\n\n # Set plot limits\n allx = [i for sl in [list(p['x']) for p in step_pos] for i in sl]\n ally = [i for sl in [list(p['y']) for p in step_pos] for i in sl]\n allz = [i for sl in [list(p['z']) for p in step_pos] for i in sl]\n ax.set_xlim3d(min(allx), max(allx))\n ax.set_ylim3d(min(ally), max(ally))\n ax.set_zlim3d(min(allz), max(allz))\n\n # Animation\n def animate(i):\n pos = step_pos[i]\n sc._offsets3d = (pos['x'].values,\n pos['y'].values,\n pos['z'].values)\n\n ani = matplotlib.animation.FuncAnimation(fig, animate,\n frames=steps,\n interval=200,\n repeat=False)\n return ani", "def comp_time_plot(p1=database['K+'], p2=database['pi+'], pmax=80, plot=True):\r\n dt = []\r\n p_range = np.linspace(10, pmax, 1000)\r\n m1 = p1.mass\r\n m2 = p2.mass\r\n for p in p_range:\r\n t1_per_m = 76.273/(beta(p, m1)*gamma(p, m1)*c)\r\n t2_per_m = 76.273/(beta(p, m2)*gamma(p, m2)*c)\r\n dt.append(abs(t1_per_m - t2_per_m)*1e12)\r\n dt_12_5 = dt[np.argmin(abs(p_range-12.5))]\r\n dt_75 = dt[np.argmin(abs(p_range-75))]\r\n ratio = dt_12_5/dt_75\r\n if plot==True:\r\n fig = plt.figure(figsize=[10, 5])\r\n ax = fig.add_subplot(1, 1, 1)\r\n ax.plot(p_range, dt, 'b', label=r'$\\Delta t$')\r\n ax.axvline(12.5, color='r', label='p=12.5 GeV')\r\n ax.axvline(75, color='g', label='p=75 GeV')\r\n ax.set_xlim(10, pmax)\r\n ax.set_ylim(0)\r\n ax.set_xlabel('p / GeV', fontsize=20)\r\n# ax.set_yscale('log')\r\n ax.set_ylabel(r'$\\Delta t$ / ps', fontsize=20)\r\n title = f'{p1.name} to {p2.name} '\r\n title += r'$\\Delta t$ dependancy on particle momenta'\r\n ax.set_title(title, fontsize=20)\r\n ax.legend(fontsize=20)\r\n text = 'dt(12.5) = {0:.2f} ps, '.format(dt_12_5)\r\n text += 'dt(75) = {0:.2f} ps, '.format(dt_75)\r\n text += 'ratio = {0:.3f}'.format(ratio)\r\n plt.show()\r\n print(text)\r\n return [dt_12_5, dt_75, ratio]", "def plotConservation2(c, nt, massesVector, methodName):\n # generate vector of time for plots\n timeVector = range(0, nt)\n \n plt.plot(timeVector, massesVector, label=methodName)\n plt.title(\"Mass vs time step\\nc=\"+str(c))\n \n # calculate mean and variance around mean of the means\n meanOfMeans = np.mean(massesVector)\n varOfMeans = np.var(massesVector)\n \n # print results\n print(\"Average of mean \" + methodName + \": \" + str(meanOfMeans))\n print(\"Variance of mean \" + methodName + \": \" + str(varOfMeans))", "def drawAxes(t):\r\n t.speed(0)\r\n t.pd()\r\n t.forward(500)\r\n t.back(500)", "def plot(particle_array, num_particles, axees, sim_len, plot_step):\n fig = plt.figure()\n ax1 = plt.axes(xlim=(-axees, axees),\n ylim=(-axees, axees))\n _, = ax1.plot([], [], lw=2)\n plt.xlabel('X [m]')\n plt.ylabel('Y [m]')\n\n Writer = animation.writers['ffmpeg']\n writer = Writer(fps=35, metadata=dict(artist='Me'), bitrate=1800)\n\n colors = ['blue','green','red','black'] # 'cyan','magenta','yellow\n lines = []\n for _ in range(num_particles):\n lobj = ax1.plot([],[],lw=2,color=colors[random.randrange(0,len(colors))])[0]\n lines.append(lobj)\n\n def init():\n for line in lines:\n line.set_data([],[])\n return lines\n\n coord_tuples = [([], []) for _ in range(num_particles)]\n\n def animate(i):\n for index in range(0, num_particles):\n coord_tuples[index][0].append(particle_array[2*index, i])\n coord_tuples[index][1].append(particle_array[2*index+1, i])\n\n xlist = [tup[0] for tup in coord_tuples]\n ylist = [tup[1] for tup 
in coord_tuples]\n\n for lnum, line in enumerate(lines):\n line.set_data(xlist[lnum][-5:], ylist[lnum][-5:])\n\n return lines\n\n # call the animator. blit=True means only re-draw the parts that have changed.\n anim = animation.FuncAnimation(fig, animate, init_func=init,\n frames=range(0, sim_len, plot_step),\n interval=10, blit=False)\n start = time.perf_counter()\n print('Creating animation ...\\n')\n anim.save('simulation.mp4', writer=writer)\n calc_time = time.perf_counter() - start\n print(f'animation time: {calc_time} s\\n')", "def setup_anime(self, xmin_off=0, ymin_off=0, xmax_off=0, ymax_off=0):\n xtremes = [(min(x), min(y), max(x), max(y)) for x, y in self.artists]\n xmin = min(map(lambda lst: lst[0], xtremes)) + xmin_off\n ymin = min(map(lambda lst: lst[1], xtremes)) + ymin_off\n xmax = max(map(lambda lst: lst[2], xtremes)) + xmax_off\n ymax = max(map(lambda lst: lst[3], xtremes)) + ymax_off\n print(\"Xtremes:\", xmin, xmax, ymin, ymax)\n\n self.fig = plt.figure()\n self.ax = plt.axes(xlim=(xmin, xmax), ylim=(ymin, ymax),\n autoscale_on=False)\n self.ax.set_facecolor('k')\n self.ax.set(xlabel='x [a.u.]', ylabel='y [a.u.]',\n title='Projectile motion')\n self.ax.set_aspect('equal')\n self.ax.grid()\n\n for a in range(self.art_num):\n ln, = self.ax.plot([], [], '--')\n ln.set_clip_on(False)\n self.lines.append(ln)\n\n plt.gca().set_prop_cycle(None)\n\n for a in range(self.art_num):\n pt, = self.ax.plot([], [], 'o')\n pt.set_clip_on(False)\n self.points.append(pt)\n\n self.time_template = 'time = %d a.u.'\n self.time_text = self.ax.text(.5, .5, '', color='c',\n transform=self.ax.transAxes,\n horizontalalignment='center',\n verticalalignment='center')", "def angular_momentum(self, AM):\n # Printing the amplitude to command line\n amplitude = max(AM)-min(AM)\n print('Amplitude of angular momentum during %i year(s): %g[AU²/yr²]' \\\n %(self.t, amplitude))\n # Creating an axis for the time steps\n x = np.linspace(0, self.t, self.N*self.t+1)\n # Initializing the figure\n plt.figure(figsize=(10, 10))\n # Creating the plot\n plt.plot(x, AM)\n # Decorating the plot\n plt.suptitle('Total angular momentum in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²/yr²]', fontsize=16)\n plt.legend(['AM'])", "def v_from_p_function(self):\r\n\r\n track_c = [] # p classical function,\r\n for i in range(len(self.dt.momentum_t)):\r\n track_c.append(self.dt.momentum_t[i] / self.dt.mass)\r\n\r\n (fig, ax) = plt.subplots()\r\n\r\n ax.plot(\r\n self.dt.momentum_t,\r\n track_c,\r\n linestyle=':',\r\n linewidth=1,\r\n color='b',\r\n label='classic',\r\n )\r\n\r\n # marker=\"+\", markersize = 13,\r\n # ax.plot(self.dt.momentum_t, self.observer.velT, linestyle=\" \",\r\n # color=\"k\",marker=\"+\", markersize = 13, label=\"measurement\")\r\n\r\n ax.plot(\r\n self.dt.momentum_t,\r\n self.dt.vel_t,\r\n linestyle=' ',\r\n color='k',\r\n marker='o',\r\n label='result of measurements',\r\n )\r\n ax.plot(\r\n self.dt.momentum_t,\r\n self.dt.vel_anl,\r\n linestyle='-',\r\n color='red',\r\n linewidth=1,\r\n label='continuum',\r\n )\r\n\r\n # Euler's method == analitical function. 
We not plot it.\r\n\r\n ax.plot(\r\n self.dt.momentum_t,\r\n self.dt.vn,\r\n linestyle='--',\r\n color='blue',\r\n marker='x',\r\n linewidth=1,\r\n label=\"Euler's method\",\r\n )\r\n\r\n # error\r\n\r\n ax.errorbar(self.dt.momentum_t, self.dt.vel_t, fmt='k ',\r\n yerr=self.dt.vel_t_err)\r\n\r\n xm = -1.0\r\n for i in range(len(self.dt.momentum_t)):\r\n if self.dt.momentum_t[i] > xm:\r\n xm = self.dt.momentum_t[i]\r\n stepx = round(xm / float(len(self.dt.momentum_t)), 1)\r\n xm = round(xm + stepx, 1)\r\n ax.set_xlim([0, xm]) # xm = 0.85\r\n\r\n # signature on the horizontal x-axis\r\n\r\n ax.set_xlabel('p')\r\n\r\n # Create an instance of the class that will be responsible for the location of the labels (base is step on x)\r\n\r\n locatorx = matplotlib.ticker.MultipleLocator(base=stepx) # step on x is base=0.1\r\n\r\n # Set the locator for the main labels\r\n\r\n ax.xaxis.set_major_locator(locatorx)\r\n\r\n # line draw\r\n\r\n line = matplotlib.lines.Line2D([0.0, 9.0], [1.0, 1.0], color='b'\r\n )\r\n ax.add_line(line)\r\n plt.text(0.7, 1.01, u'light speed', horizontalalignment='center'\r\n )\r\n ax.set_ylim([0, 1.1])\r\n\r\n # signature on vertical y axis\r\n\r\n ax.set_ylabel('v')\r\n\r\n # Create an instance of the class that will be responsible for the location of the labels (base is step on y)\r\n\r\n locatory = matplotlib.ticker.MultipleLocator(base=0.1) # step on y is base=0.1\r\n\r\n # Set the locator for the main labels\r\n\r\n ax.yaxis.set_major_locator(locatory)\r\n\r\n ax.grid()\r\n\r\n # show legend\r\n\r\n ax.legend(loc='upper left')\r\n\r\n # show drawing\r\n # pylab.show()\r\n\r\n plt.show()", "def demo(self, tmin=0, tmax=27.4, cadence=30.0 / 60.0 / 24.0, offset=0, raw=False, ax=None):\n t = np.arange(tmin, tmax, cadence)\n if ax is None:\n plt.figure('demo', figsize=(8, 3))\n else:\n plt.sca(ax)\n y = self.model(t)\n if raw:\n plt.plot(t, y + offset, alpha=0.25, linewidth=1, color='royalblue')\n plt.plot(t, self.integrated(t) + offset, alpha=0.5, linewidth=1, color='darkorange')\n plt.xlim(tmin, tmax)\n # plt.ylim(np.max(y)+0.01, np.min(y)-0.01)\n plt.xlabel('Time (days)')\n plt.ylabel('Flux (mag.)')", "def plot_ocp_momentum(self, figsize: Tuple[int, int] = (5, 5),\n ax: plt.axes = None):\n\n if not ax:\n fig, ax = plt.subplots(1, figsize=figsize)\n fig.suptitle(\"One Cycle Momentum with Tail\")\n\n ax.plot(self.steps_list, self.momentum_list, label=\"1cycle Momentum\")\n\n ax.set_xlabel(\"Steps\")\n ax.set_ylabel(\"Momentum\")\n\n return ax", "def plot_forces(t, v, m, a):\n plt.figure()\n drag = -m*a*abs(v)*v\n grav = [-m*9.81]*len(v)\n Boyancy = [1. * 9.81 * 0.1]*len(v) # rho * g * V\n Fsum = drag+grav+Boyancy\n plt.plot(t, drag, t, grav, t, Boyancy, t, Fsum)\n plt.legend([\"Drag force\", \"Gravity force\", \"Boyancy\", \"Sum of forces\"])\n plt.savefig('Forces.png')", "def animate(times: np.ndarray, angles: np.ndarray) -> None:\n x = np.sin(angles)\n y = -np.cos(angles)\n\n fig = plt.figure()\n ax = fig.add_subplot(111, autoscale_on=False, xlim=(-2, 2), ylim=(-2, 2))\n ax.grid()\n\n line, = ax.plot([], [], \"o-\", lw=2)\n\n def init():\n line.set_data([], [])\n return (line,)\n\n def animate(i):\n thisx = [0, x[i]]\n thisy = [0, y[i]]\n\n line.set_data(thisx, thisy)\n return (line,)\n\n ani = animation.FuncAnimation(\n fig, animate, np.arange(1, len(y)), interval=25, blit=True, init_func=init\n )\n plt.show()" ]
[ "0.7269444", "0.6327164", "0.5962005", "0.5897222", "0.5878594", "0.58591664", "0.5845195", "0.5835746", "0.5810472", "0.58035827", "0.57862294", "0.57339144", "0.56806844", "0.566614", "0.564614", "0.5617046", "0.5595726", "0.55792063", "0.5563439", "0.55550057", "0.55540186", "0.55378467", "0.5504329", "0.5499325", "0.5488154", "0.548599", "0.5461487", "0.5457307", "0.545616", "0.53929" ]
0.78880644
0
Return True if 'that' is a GrowPopulation and this GrowPopulation has the same value as that GrowPopulation
def __eq__(self, that): return isinstance(that, GrowPopulation) and self.species_index == that.species_index and \ self.card_trade_index == that.card_trade_index
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __eq__(self, population):\n return self.chromosome_list == population.chromosome_list", "def __eq__(self, other):\n return isinstance(other, Elongation)\\\n and len(self.xs) == len(other.xs)\\\n and all(self.xs == other.xs) and all(self.ys == other.ys)\\\n and self.gauge_length == other.gauge_length\\\n and self.sample_width == other.sample_width\\\n and self.sample_thickness == other.sample_thickness\\\n and self.name == other.name", "def __eq__(self: 'Cheese', other: 'Cheese') -> bool:\n return isinstance(other, Cheese) and self.size == other.size", "def __eq__(self, other):\n if not isinstance(other, PopSettings):\n return False\n\n return self.__dict__ == other.__dict__", "def mergeable(self, other):\n return self.gamma == other.gamma and self.min_value == other.min_value", "def __eq__(self, other):\n return isinstance(other, Bag) and Counter(self.items) == Counter(other.items)", "def _ok(self, assignment_graph, source, value, target):\n target_values = assignment_graph[target]\n return len(target_values - set([value])) > 0", "def isIndividualInPopulation(self, individual, population):\n\t\tfor i in population:\n\t\t\tif i == individual:\n\t\t\t\treturn True\n\t\treturn False", "def __eq__(self, other):\n # for each gene in each chromosome\n for g in range(len(self.genes)):\n # if any gene values don't match, then the chromosomes aren't equal\n if self.genes[g].value != other.genes[g].value:\n return False\n # everything matched, so the chromosomes are equal\n return True", "def __eq__(self, other):\n if type(self) is type(other) and \\\n self._qubits == other._qubits:\n return True\n return False", "def __eq__(self, other: Any) -> bool:\n\n if other is None:\n return False\n\n if isinstance(other, Value):\n other = other.to(self.units)\n\n return abs(float(self) - float(other)) < 1e-8", "def __eq__(self, autre: Any) -> bool:\n if type(self) != type(autre):\n return False\n return self._voisinage == autre._voisinage", "def is_consistent_with(self, target):\n same_parent = self.parent() == target.parent()\n # Note FP. Is it really required to have the\n # same parent? 
Inclusion of all proc may be enough?\n return npw.equal(self.shape, target.shape).all() and same_parent", "def equals(self, other):\n if not isinstance(other, PermutationGroup):\n return False\n\n set_self_gens = set(self.generators)\n set_other_gens = set(other.generators)\n\n # before reaching the general case there are also certain\n # optimisation and obvious cases requiring less or no actual\n # computation.\n if set_self_gens == set_other_gens:\n return True\n\n # in the most general case it will check that each generator of\n # one group belongs to the other PermutationGroup and vice-versa\n for gen1 in set_self_gens:\n if not other.contains(gen1):\n return False\n for gen2 in set_other_gens:\n if not self.contains(gen2):\n return False\n return True", "def __eq__(self, other):\n if isinstance(other, DenseUnit):\n return (Counter(self.dimension) == Counter(other.dimension) and Counter(self.points) == Counter(\n other.points))\n return False", "def __eq__(self, other):\n return self.value == other.value", "def __eq__(self, other):\n return isinstance(other, type(self)) and self.size == other.size", "def __eq__(self, other: Any) -> bool:\n if not isinstance(other, Just):\n return False\n return other.get == self.get", "def __eq__(self, other):\n if not isinstance(other, GiftDetails):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n if self.environment != other.environment:\n return False\n\n for i in range(0, len(self.genes)):\n if self.genes[i] != other.genes[i]:\n return False\n\n return True", "def __eq__(self, other):\n if not isinstance(other, TriggerHistories):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\r\n if self.over in other and self.under in other:\r\n return True\r\n else:\r\n return False", "def __eq__(self, other):\n if not isinstance(other, Constellation_class):\n return False\n return (self._g == other._g)", "def __eq__(self, other):\n return other and self.values == other.values", "def __eq__(self, other):\n if type(other) is type(self):\n return other.data == self.data\n return False", "def __eq__(self, other):\n return np.array_equal(self.hp, other.hp) and np.array_equal(self.hc, other.hc)", "def __eq__(self, other):\n return (type(self) == type(other) and\n self.puzzle == other.puzzle and\n all([x in self.children for x in other.children]) and\n all([x in other.children for x in self.children]))", "def __eq__(self, other):\n if not isinstance(other, Workitems):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other: Any) -> bool:\n if not isinstance(other, Transaction):\n return False\n return self.to_solders() == other.to_solders()", "def __eq__(self, other):\n if not isinstance(other, InstanceGroupAdjustment):\n return False\n\n return self.__dict__ == other.__dict__" ]
[ "0.65205014", "0.60275203", "0.6016418", "0.5955765", "0.59035647", "0.58681065", "0.5847267", "0.5844807", "0.5805479", "0.57989675", "0.5784682", "0.57827413", "0.57796884", "0.57624125", "0.57547325", "0.57336557", "0.5731267", "0.5723152", "0.5716336", "0.5712271", "0.5710719", "0.5703099", "0.5694585", "0.5673044", "0.5655102", "0.5648068", "0.56451243", "0.56374145", "0.5628103", "0.56178534" ]
0.80442977
0
Parse this population grow action from json to a GrowPopulation
def parse_grow_population(json_grow): if GrowPopulation.is_valid_pop_grow(json_grow): return GrowPopulation(json_grow[1], json_grow[2]) else: raise Exception("Invalid json population grow action")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_proxy_grow_population(json_grow):\n if GrowPopulation.is_valid_proxy_grow(json_grow):\n return GrowPopulation(json_grow[0], json_grow[1])\n else:\n raise Exception(\"Invalid json population grow action\")", "def is_valid_pop_grow(json_grow):\n return isinstance(json_grow, list) and len(json_grow) == 3 and json_grow[0] == \"population\" and\\\n not isinstance(json_grow[1], bool) and isinstance(json_grow[1], int) and json_grow[1] >= 0 and\\\n not isinstance(json_grow[2], bool) and isinstance(json_grow[2], int) and json_grow[2] >= 0", "def parsing_processor(self, change):\n self.processor(simplejson.loads(change))", "def read_population_distribution(path_POP, occupancy, verbose=False):\n with open(path_POP, 'r') as f:\n jd = json.load(f)\n\n data = jd[occupancy]\n\n # convert peak population to persons/m2\n data['peak'] = data['peak'] / (1000. * ft2)\n\n if verbose:\n pp.pprint(data)\n\n return data", "def parse_input_file(self, input_path):\n with open(input_path) as input_json_data:\n input_data = json.load(input_json_data)\n\n self.total_outlets = input_data['machine']['outlets']['count_n']\n self.total_items_quantity = input_data['machine']['total_items_quantity']\n self.beverages = input_data['machine']['beverages']\n\n parsed_data = {\n 'total_outlets': self.total_outlets,\n 'total_items_quantity': self.total_items_quantity,\n 'beverages': self.beverages\n }\n\n return parsed_data", "def _add_population(self, info, index, population):\n info[index] = [self._representation.decode(item) for\n item in population]", "def adding_population(self, population):\n try:\n for animals in population:\n for animal in animals['pop']:\n if animal['species'] == 'Herbivore':\n self.add_herbivores(animal, animals)\n if animal['species'] == 'Carnivore':\n self.add_carnivores(animal, animals)\n except (ValueError, KeyError):\n raise ValueError(\n 'Invalid input for population, see documentation.'\n )", "def load_population(self):\r\n checkpoint = load_pickle('spike_swarm_sim/checkpoints/populations/' + self.checkpoint_name)\r\n logging.info('Resuming CMA-ES evolution using checkpoint ' + self.checkpoint_name)\r\n key = tuple(self.populations.keys())[0]\r\n for key, pop in checkpoint['populations'].items():\r\n self.populations[key].strategy_m = checkpoint['mu'][key]\r\n self.populations[key].strategy_C = checkpoint['C'][key]\r\n self.populations[key].cc = checkpoint['cc'][key]\r\n self.populations[key].cs = checkpoint['cs'][key]\r\n self.populations[key].mu_cov = checkpoint['mu_cov'][key]\r\n self.populations[key].c_cov = checkpoint['c_cov'][key]\r\n self.populations[key].ds = checkpoint['ds'][key]\r\n self.populations[key].evo_path = checkpoint['evo_path'][key]\r\n self.populations[key].ps = checkpoint['ps'][key]\r\n self.populations[key].B = checkpoint['B'][key]\r\n self.populations[key].Bt = checkpoint['Bt'][key]\r\n self.populations[key].D = checkpoint['D'][key]\r\n self.populations[key].sigma = checkpoint['sigma'][key]\r\n self.populations[key].num_evals = checkpoint['num_evals'][key]\r\n self.populations[key].population = self.populations[key].sample()\r\n self.init_generation = checkpoint['generation']\r\n self.evolution_history = checkpoint['evolution_hist']", "def from_json(cls, json_str: str) -> Pig:\n instance = Pig.construct()\n error_messages = []\n match = 0\n\n # use oneOf discriminator to lookup the data type\n _data_type = json.loads(json_str).get(\"className\")\n if not _data_type:\n raise ValueError(\"Failed to lookup data type from the field `className` in the input.\")\n\n 
# check if data type is `BasquePig`\n if _data_type == \"BasquePig\":\n instance.actual_instance = BasquePig.from_json(json_str)\n return instance\n\n # check if data type is `DanishPig`\n if _data_type == \"DanishPig\":\n instance.actual_instance = DanishPig.from_json(json_str)\n return instance\n\n # deserialize data into BasquePig\n try:\n instance.actual_instance = BasquePig.from_json(json_str)\n match += 1\n except (ValidationError, ValueError) as e:\n error_messages.append(str(e))\n # deserialize data into DanishPig\n try:\n instance.actual_instance = DanishPig.from_json(json_str)\n match += 1\n except (ValidationError, ValueError) as e:\n error_messages.append(str(e))\n\n if match > 1:\n # more than 1 match\n raise ValueError(\"Multiple matches found when deserializing the JSON string into Pig with oneOf schemas: BasquePig, DanishPig. Details: \" + \", \".join(error_messages))\n elif match == 0:\n # no match\n raise ValueError(\"No match found when deserializing the JSON string into Pig with oneOf schemas: BasquePig, DanishPig. Details: \" + \", \".join(error_messages))\n else:\n return instance", "def _populate(self, json):\n if json != {}:\n new_nodes = [\n LKENodePoolNode(self._client, c) for c in json[\"nodes\"]\n ]\n json[\"nodes\"] = new_nodes\n\n super()._populate(json)", "def add_population(self, population):\n for species in population:\n y, x = [n - 1 for n in species['loc']]\n for ani in species['pop']:\n if ani['species'] == 'Herbivore':\n self.island.island[y][x].herbivores.append(Herbivore(\n weight=ani['weight'], age=ani['age'],\n coordinates=(y, x)))\n elif ani['species'] == 'Carnivore':\n self.island.island[y][x].carnivores.append(Carnivore(\n weight=ani['weight'], age=ani['age'],\n coordinates=(y, x)))", "def parse_json(self, json_cfg: Dict) -> Any:\n raise NotImplementedError", "def grow(self, stream):\n with self.db:\n grow = Grow.parse(stream)\n tree = grow(self.tree)\n return self.__class__(tree, self.db)", "def reach_process_json():\n response = request.body.read().decode('utf-8')\n body = json.loads(response)\n json_str = body.get('json')\n rp = reach.process_json_str(json_str)\n if rp and rp.statements:\n stmts = stmts_to_json(rp.statements)\n res = {'statements': stmts}\n return res\n else:\n res = {'statements': []}\n return res", "def population_timeline_chart_data(matchid):\n ps = Match.query.get(matchid).populationstats.all()\n labels = []\n popcounts = []\n lowestPop = 100\n\n for snapshot in ps:\n labels.append(snapshot.time.strftime('%H:%M'))\n popcounts.append(snapshot.popcount)\n if snapshot.popcount is None or snapshot.popcount < lowestPop:\n lowestPop = snapshot.popcount\n\n return json.dumps(labels), json.dumps(popcounts), lowestPop", "def parseAction(self, action):\n action = self.AGENT_TYPES[action]\n\n\n full_action = {}\n full_action[\"action\"] = action\n if action == \"eli-kw\":\n keywords = self.dataset.getSuggestedKeywords()\n full_action[\"keywords\"] = keywords[:self.N]\n elif action == \"info\" or action == \"info-all\":\n full_action[\"function\"] = self.current_function\n\n elif action == \"sugg\" or action == \"sugg-info-all\":\n top_hit = self.dataset.getTopHits(1)\n if not top_hit:\n full_action[\"action\"] = \"eli-query\"\n else:\n functions = self.dataset.getTopHits(1, self.result_index)\n if functions:\n full_action[\"function\"] = functions[0]\n else:\n full_action[\"function\"] = \"\"\n\n self.result_index += 1\n\n elif action == \"sugg-all\":\n full_action[\"list\"] = self.dataset.getTopHits(self.K, self.result_index)\n\n 
elif action == \"change-page\":\n self.result_index += self.K\n full_action[\"list\"] = self.dataset.getTopHits(self.K, self.result_index)\n return full_action", "def from_json(self, data: str) -> None:\n self.clear()\n self.extend(json.loads(data))", "def _parsejson(x):\n return json.loads(x.read().decode('utf-8'))", "def from_montage_json(data: Mapping):\n\n # logging.debug(pformat(data['exam_type']))\n\n # TODO: Check event flags for various event types to get ordering, study, and reading\n\n try:\n referring_physician = data['events'][0].get('provider')\n if referring_physician:\n referring_physician = referring_physician.get('name')\n\n study_datetime = None\n if len(data['events']) > 1:\n # Last event is usually read I think, take event _before_ last one\n study_event = data['events'][-2]\n if study_event.get('date'):\n study_datetime = DatetimeParser.parse(study_event['date'])\n else:\n # Otherwise just take whatever is last\n study_event = data['events'][-1]\n if study_event.get('date'):\n study_datetime = DatetimeParser.parse(study_event['date'])\n\n montage_cpts = []\n for resource in data[\"exam_type\"][\"cpts\"]:\n code = resource.split(\"/\")[-2]\n montage_cpts.append(code)\n\n tags = {\n \"AccessionNumber\": data[\"accession_number\"],\n \"PatientID\": data[\"patient_mrn\"],\n 'StudyDescription': data['exam_type']['description'],\n 'ReferringPhysicianName': referring_physician,\n 'PatientSex': data['patient_sex'],\n 'Organization': data['organization']['label'],\n \"Modality\": data['exam_type']['modality']['label']\n }\n\n meta = {\n 'BodyParts': None, # Placeholder for meta\n 'CPTCodes': None, # Placeholder for meta\n 'PatientName': \"{}^{}\".format(\n data[\"patient_last_name\"].upper(),\n data[\"patient_first_name\"].upper()),\n 'PatientAge': data['patient_age'],\n \"OrderCode\": data[\"exam_type\"][\"code\"],\n \"PatientStatus\": data[\"patient_status\"],\n \"ReportText\": Montage.clean_text(data['text']),\n \"ReadingPhysiciansName\": data['events'][-1]['provider']['name'],\n 'StudyDateTime': study_datetime,\n \"MontageCPTCodes\": montage_cpts\n }\n except KeyError:\n meta = {\n \"BodyParts\": None,\n \"CPTCodes\": None,\n \"MontageCPTCodes\": data['meta']['MontageCPTCodes'],\n \"OrderCode\": data['meta']['OrderCode'],\n \"PatientAge\": data['meta']['PatientAge'],\n \"PatientName\": data['meta']['PatientName'],\n \"PatientStatus\": data['meta']['PatientStatus'],\n \"ReadingPhysiciansName\": data['meta']['ReadingPhysiciansName'],\n \"ReportText\": data['meta']['ReportText'],\n \"StudyDateTime\": data['meta']['StudyDateTime']\n }\n tags = {\n \"AccessionNumber\": data['tags']['AccessionNumber'],\n \"Modality\": data['tags']['Modality'],\n \"Organization\": data['tags']['Organization'],\n \"PatientID\": data['tags']['PatientID'],\n \"PatientSex\": data['tags']['PatientSex'],\n \"ReferringPhysicianName\": data['tags']['ReferringPhysicianName'],\n \"StudyDescription\": data['tags']['StudyDescription']\n }\n\n d = Dixel(meta=meta,\n tags=tags,\n level=DicomLevel.STUDIES)\n d.report = RadiologyReport(meta['ReportText'])\n\n return d", "def from_json_file(path):\n with open(path, 'r') as f:\n return ReactionProbabilities.from_json(f.read())", "def process_json_community_op(actor, op_json, date):\n CommunityOp.process_if_valid(actor, op_json, date)", "def create_new_population(self):\n self.check_for_generation_cap()\n pop_container = list()\n for chromosome in self.population:\n partner = bm.select_partner(\n self.fitness_scores, self.population)\n child = 
bm.mutate(bm.crossover(chromosome, partner))\n pop_container.append(child)\n if self.population == pop_container:\n print(\"newly created populous is the same as the old populous\")\n self.population = pop_container\n print(\"generations: \", self.generations)\n self.generations += 1", "def parse(self):\n pass", "def parse(self):\n pass", "def parse(self):\n pass", "def parse(self):\n pass", "def from_json(self, content):\r\n return simplejson.loads(content)", "def load_file(self):\n self._check_setup()\n json_str = self.get_json_file()\n if json_str is None:\n return\n\n if not self._is_json_str():\n with open(json_str, 'r') as f:\n jf = json.load(f)\n else:\n jf = json.loads(json_str)\n\n\n self.jf = jf\n\n target = jf['target']\n if isinstance(target, str):\n target = eval(target)\n\n goal = jf['goal']\n if isinstance(goal, str):\n goal = eval(goal)\n\n self.gen_target_pos = np.array(target)\n self.gen_goal_pos = np.array(goal)\n\n if 'place_walls' in jf:\n self.place_walls = jf['place_walls']\n\n if self.get_is_rnd():\n self.rnd_map = jf['rnd']\n self.env_jf = jf['env']", "def from_json(cls, data: dict) -> \"Pokemon\":\n\n moves: list = list(map(Move.from_json, data[\"move_set\"]))\n return cls(data[\"id\"], data[\"name\"], data[\"types\"], data[\"weight\"],\n data[\"height\"], data[\"abilities\"], data[\"move_list\"], moves)", "def from_json(s):\n try:\n d = json.loads(s)\n except ValueError:\n raise ParsingError('Failed to parse JSON: ' + s)\n\n try:\n action, args = d['action'], d['args']\n\n except KeyError:\n raise ParsingError('Failed to decode Command object from JSON: '+s)\n\n return GameAction(action, *args)" ]
[ "0.76610595", "0.5805996", "0.50387174", "0.49424437", "0.4896679", "0.48292255", "0.47847217", "0.4774338", "0.4707168", "0.47061458", "0.46529007", "0.45922315", "0.457492", "0.4555513", "0.45065725", "0.4488905", "0.4473739", "0.44697726", "0.44694299", "0.44630593", "0.44630477", "0.4446886", "0.44406494", "0.44406494", "0.44406494", "0.44406494", "0.44351655", "0.4432901", "0.44278046", "0.44275168" ]
0.80320364
0
Parse this population grow action from json to a GrowPopulation
def parse_proxy_grow_population(json_grow): if GrowPopulation.is_valid_proxy_grow(json_grow): return GrowPopulation(json_grow[0], json_grow[1]) else: raise Exception("Invalid json population grow action")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_grow_population(json_grow):\n if GrowPopulation.is_valid_pop_grow(json_grow):\n return GrowPopulation(json_grow[1], json_grow[2])\n else:\n raise Exception(\"Invalid json population grow action\")", "def is_valid_pop_grow(json_grow):\n return isinstance(json_grow, list) and len(json_grow) == 3 and json_grow[0] == \"population\" and\\\n not isinstance(json_grow[1], bool) and isinstance(json_grow[1], int) and json_grow[1] >= 0 and\\\n not isinstance(json_grow[2], bool) and isinstance(json_grow[2], int) and json_grow[2] >= 0", "def parsing_processor(self, change):\n self.processor(simplejson.loads(change))", "def read_population_distribution(path_POP, occupancy, verbose=False):\n with open(path_POP, 'r') as f:\n jd = json.load(f)\n\n data = jd[occupancy]\n\n # convert peak population to persons/m2\n data['peak'] = data['peak'] / (1000. * ft2)\n\n if verbose:\n pp.pprint(data)\n\n return data", "def parse_input_file(self, input_path):\n with open(input_path) as input_json_data:\n input_data = json.load(input_json_data)\n\n self.total_outlets = input_data['machine']['outlets']['count_n']\n self.total_items_quantity = input_data['machine']['total_items_quantity']\n self.beverages = input_data['machine']['beverages']\n\n parsed_data = {\n 'total_outlets': self.total_outlets,\n 'total_items_quantity': self.total_items_quantity,\n 'beverages': self.beverages\n }\n\n return parsed_data", "def _add_population(self, info, index, population):\n info[index] = [self._representation.decode(item) for\n item in population]", "def adding_population(self, population):\n try:\n for animals in population:\n for animal in animals['pop']:\n if animal['species'] == 'Herbivore':\n self.add_herbivores(animal, animals)\n if animal['species'] == 'Carnivore':\n self.add_carnivores(animal, animals)\n except (ValueError, KeyError):\n raise ValueError(\n 'Invalid input for population, see documentation.'\n )", "def load_population(self):\r\n checkpoint = load_pickle('spike_swarm_sim/checkpoints/populations/' + self.checkpoint_name)\r\n logging.info('Resuming CMA-ES evolution using checkpoint ' + self.checkpoint_name)\r\n key = tuple(self.populations.keys())[0]\r\n for key, pop in checkpoint['populations'].items():\r\n self.populations[key].strategy_m = checkpoint['mu'][key]\r\n self.populations[key].strategy_C = checkpoint['C'][key]\r\n self.populations[key].cc = checkpoint['cc'][key]\r\n self.populations[key].cs = checkpoint['cs'][key]\r\n self.populations[key].mu_cov = checkpoint['mu_cov'][key]\r\n self.populations[key].c_cov = checkpoint['c_cov'][key]\r\n self.populations[key].ds = checkpoint['ds'][key]\r\n self.populations[key].evo_path = checkpoint['evo_path'][key]\r\n self.populations[key].ps = checkpoint['ps'][key]\r\n self.populations[key].B = checkpoint['B'][key]\r\n self.populations[key].Bt = checkpoint['Bt'][key]\r\n self.populations[key].D = checkpoint['D'][key]\r\n self.populations[key].sigma = checkpoint['sigma'][key]\r\n self.populations[key].num_evals = checkpoint['num_evals'][key]\r\n self.populations[key].population = self.populations[key].sample()\r\n self.init_generation = checkpoint['generation']\r\n self.evolution_history = checkpoint['evolution_hist']", "def from_json(cls, json_str: str) -> Pig:\n instance = Pig.construct()\n error_messages = []\n match = 0\n\n # use oneOf discriminator to lookup the data type\n _data_type = json.loads(json_str).get(\"className\")\n if not _data_type:\n raise ValueError(\"Failed to lookup data type from the field `className` in the input.\")\n\n # check 
if data type is `BasquePig`\n if _data_type == \"BasquePig\":\n instance.actual_instance = BasquePig.from_json(json_str)\n return instance\n\n # check if data type is `DanishPig`\n if _data_type == \"DanishPig\":\n instance.actual_instance = DanishPig.from_json(json_str)\n return instance\n\n # deserialize data into BasquePig\n try:\n instance.actual_instance = BasquePig.from_json(json_str)\n match += 1\n except (ValidationError, ValueError) as e:\n error_messages.append(str(e))\n # deserialize data into DanishPig\n try:\n instance.actual_instance = DanishPig.from_json(json_str)\n match += 1\n except (ValidationError, ValueError) as e:\n error_messages.append(str(e))\n\n if match > 1:\n # more than 1 match\n raise ValueError(\"Multiple matches found when deserializing the JSON string into Pig with oneOf schemas: BasquePig, DanishPig. Details: \" + \", \".join(error_messages))\n elif match == 0:\n # no match\n raise ValueError(\"No match found when deserializing the JSON string into Pig with oneOf schemas: BasquePig, DanishPig. Details: \" + \", \".join(error_messages))\n else:\n return instance", "def _populate(self, json):\n if json != {}:\n new_nodes = [\n LKENodePoolNode(self._client, c) for c in json[\"nodes\"]\n ]\n json[\"nodes\"] = new_nodes\n\n super()._populate(json)", "def add_population(self, population):\n for species in population:\n y, x = [n - 1 for n in species['loc']]\n for ani in species['pop']:\n if ani['species'] == 'Herbivore':\n self.island.island[y][x].herbivores.append(Herbivore(\n weight=ani['weight'], age=ani['age'],\n coordinates=(y, x)))\n elif ani['species'] == 'Carnivore':\n self.island.island[y][x].carnivores.append(Carnivore(\n weight=ani['weight'], age=ani['age'],\n coordinates=(y, x)))", "def parse_json(self, json_cfg: Dict) -> Any:\n raise NotImplementedError", "def grow(self, stream):\n with self.db:\n grow = Grow.parse(stream)\n tree = grow(self.tree)\n return self.__class__(tree, self.db)", "def reach_process_json():\n response = request.body.read().decode('utf-8')\n body = json.loads(response)\n json_str = body.get('json')\n rp = reach.process_json_str(json_str)\n if rp and rp.statements:\n stmts = stmts_to_json(rp.statements)\n res = {'statements': stmts}\n return res\n else:\n res = {'statements': []}\n return res", "def population_timeline_chart_data(matchid):\n ps = Match.query.get(matchid).populationstats.all()\n labels = []\n popcounts = []\n lowestPop = 100\n\n for snapshot in ps:\n labels.append(snapshot.time.strftime('%H:%M'))\n popcounts.append(snapshot.popcount)\n if snapshot.popcount is None or snapshot.popcount < lowestPop:\n lowestPop = snapshot.popcount\n\n return json.dumps(labels), json.dumps(popcounts), lowestPop", "def parseAction(self, action):\n action = self.AGENT_TYPES[action]\n\n\n full_action = {}\n full_action[\"action\"] = action\n if action == \"eli-kw\":\n keywords = self.dataset.getSuggestedKeywords()\n full_action[\"keywords\"] = keywords[:self.N]\n elif action == \"info\" or action == \"info-all\":\n full_action[\"function\"] = self.current_function\n\n elif action == \"sugg\" or action == \"sugg-info-all\":\n top_hit = self.dataset.getTopHits(1)\n if not top_hit:\n full_action[\"action\"] = \"eli-query\"\n else:\n functions = self.dataset.getTopHits(1, self.result_index)\n if functions:\n full_action[\"function\"] = functions[0]\n else:\n full_action[\"function\"] = \"\"\n\n self.result_index += 1\n\n elif action == \"sugg-all\":\n full_action[\"list\"] = self.dataset.getTopHits(self.K, self.result_index)\n\n elif 
action == \"change-page\":\n self.result_index += self.K\n full_action[\"list\"] = self.dataset.getTopHits(self.K, self.result_index)\n return full_action", "def from_json(self, data: str) -> None:\n self.clear()\n self.extend(json.loads(data))", "def _parsejson(x):\n return json.loads(x.read().decode('utf-8'))", "def from_montage_json(data: Mapping):\n\n # logging.debug(pformat(data['exam_type']))\n\n # TODO: Check event flags for various event types to get ordering, study, and reading\n\n try:\n referring_physician = data['events'][0].get('provider')\n if referring_physician:\n referring_physician = referring_physician.get('name')\n\n study_datetime = None\n if len(data['events']) > 1:\n # Last event is usually read I think, take event _before_ last one\n study_event = data['events'][-2]\n if study_event.get('date'):\n study_datetime = DatetimeParser.parse(study_event['date'])\n else:\n # Otherwise just take whatever is last\n study_event = data['events'][-1]\n if study_event.get('date'):\n study_datetime = DatetimeParser.parse(study_event['date'])\n\n montage_cpts = []\n for resource in data[\"exam_type\"][\"cpts\"]:\n code = resource.split(\"/\")[-2]\n montage_cpts.append(code)\n\n tags = {\n \"AccessionNumber\": data[\"accession_number\"],\n \"PatientID\": data[\"patient_mrn\"],\n 'StudyDescription': data['exam_type']['description'],\n 'ReferringPhysicianName': referring_physician,\n 'PatientSex': data['patient_sex'],\n 'Organization': data['organization']['label'],\n \"Modality\": data['exam_type']['modality']['label']\n }\n\n meta = {\n 'BodyParts': None, # Placeholder for meta\n 'CPTCodes': None, # Placeholder for meta\n 'PatientName': \"{}^{}\".format(\n data[\"patient_last_name\"].upper(),\n data[\"patient_first_name\"].upper()),\n 'PatientAge': data['patient_age'],\n \"OrderCode\": data[\"exam_type\"][\"code\"],\n \"PatientStatus\": data[\"patient_status\"],\n \"ReportText\": Montage.clean_text(data['text']),\n \"ReadingPhysiciansName\": data['events'][-1]['provider']['name'],\n 'StudyDateTime': study_datetime,\n \"MontageCPTCodes\": montage_cpts\n }\n except KeyError:\n meta = {\n \"BodyParts\": None,\n \"CPTCodes\": None,\n \"MontageCPTCodes\": data['meta']['MontageCPTCodes'],\n \"OrderCode\": data['meta']['OrderCode'],\n \"PatientAge\": data['meta']['PatientAge'],\n \"PatientName\": data['meta']['PatientName'],\n \"PatientStatus\": data['meta']['PatientStatus'],\n \"ReadingPhysiciansName\": data['meta']['ReadingPhysiciansName'],\n \"ReportText\": data['meta']['ReportText'],\n \"StudyDateTime\": data['meta']['StudyDateTime']\n }\n tags = {\n \"AccessionNumber\": data['tags']['AccessionNumber'],\n \"Modality\": data['tags']['Modality'],\n \"Organization\": data['tags']['Organization'],\n \"PatientID\": data['tags']['PatientID'],\n \"PatientSex\": data['tags']['PatientSex'],\n \"ReferringPhysicianName\": data['tags']['ReferringPhysicianName'],\n \"StudyDescription\": data['tags']['StudyDescription']\n }\n\n d = Dixel(meta=meta,\n tags=tags,\n level=DicomLevel.STUDIES)\n d.report = RadiologyReport(meta['ReportText'])\n\n return d", "def from_json_file(path):\n with open(path, 'r') as f:\n return ReactionProbabilities.from_json(f.read())", "def process_json_community_op(actor, op_json, date):\n CommunityOp.process_if_valid(actor, op_json, date)", "def create_new_population(self):\n self.check_for_generation_cap()\n pop_container = list()\n for chromosome in self.population:\n partner = bm.select_partner(\n self.fitness_scores, self.population)\n child = 
bm.mutate(bm.crossover(chromosome, partner))\n pop_container.append(child)\n if self.population == pop_container:\n print(\"newly created populous is the same as the old populous\")\n self.population = pop_container\n print(\"generations: \", self.generations)\n self.generations += 1", "def parse(self):\n pass", "def parse(self):\n pass", "def parse(self):\n pass", "def parse(self):\n pass", "def from_json(self, content):\r\n return simplejson.loads(content)", "def load_file(self):\n self._check_setup()\n json_str = self.get_json_file()\n if json_str is None:\n return\n\n if not self._is_json_str():\n with open(json_str, 'r') as f:\n jf = json.load(f)\n else:\n jf = json.loads(json_str)\n\n\n self.jf = jf\n\n target = jf['target']\n if isinstance(target, str):\n target = eval(target)\n\n goal = jf['goal']\n if isinstance(goal, str):\n goal = eval(goal)\n\n self.gen_target_pos = np.array(target)\n self.gen_goal_pos = np.array(goal)\n\n if 'place_walls' in jf:\n self.place_walls = jf['place_walls']\n\n if self.get_is_rnd():\n self.rnd_map = jf['rnd']\n self.env_jf = jf['env']", "def from_json(cls, data: dict) -> \"Pokemon\":\n\n moves: list = list(map(Move.from_json, data[\"move_set\"]))\n return cls(data[\"id\"], data[\"name\"], data[\"types\"], data[\"weight\"],\n data[\"height\"], data[\"abilities\"], data[\"move_list\"], moves)", "def from_json(s):\n try:\n d = json.loads(s)\n except ValueError:\n raise ParsingError('Failed to parse JSON: ' + s)\n\n try:\n action, args = d['action'], d['args']\n\n except KeyError:\n raise ParsingError('Failed to decode Command object from JSON: '+s)\n\n return GameAction(action, *args)" ]
[ "0.80320364", "0.5805996", "0.50387174", "0.49424437", "0.4896679", "0.48292255", "0.47847217", "0.4774338", "0.4707168", "0.47061458", "0.46529007", "0.45922315", "0.457492", "0.4555513", "0.45065725", "0.4488905", "0.4473739", "0.44697726", "0.44694299", "0.44630593", "0.44630477", "0.4446886", "0.44406494", "0.44406494", "0.44406494", "0.44406494", "0.44351655", "0.4432901", "0.44278046", "0.44275168" ]
0.76610595
1
Is json_grow a valid json representation of a species growth?
def is_valid_pop_grow(json_grow): return isinstance(json_grow, list) and len(json_grow) == 3 and json_grow[0] == "population" and\ not isinstance(json_grow[1], bool) and isinstance(json_grow[1], int) and json_grow[1] >= 0 and\ not isinstance(json_grow[2], bool) and isinstance(json_grow[2], int) and json_grow[2] >= 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_grow_population(json_grow):\n if GrowPopulation.is_valid_pop_grow(json_grow):\n return GrowPopulation(json_grow[1], json_grow[2])\n else:\n raise Exception(\"Invalid json population grow action\")", "def parse_proxy_grow_population(json_grow):\n if GrowPopulation.is_valid_proxy_grow(json_grow):\n return GrowPopulation(json_grow[0], json_grow[1])\n else:\n raise Exception(\"Invalid json population grow action\")", "def is_valid_proxy_grow(json_grow):\n return isinstance(json_grow, list) and len(json_grow) == 2 and\\\n not isinstance(json_grow[0], bool) and isinstance(json_grow[0], int) and json_grow[0] >= 0 and\\\n not isinstance(json_grow[1], bool) and isinstance(json_grow[1], int) and json_grow[1] >= 0", "def test_json():\n bounds = MolecularStructureBounds()\n copy = loads(dumps(bounds))\n assert copy == bounds", "def test_json_serialization(self, molecule):\n molecule_copy = Molecule.from_json(molecule.to_json())\n assert molecule_copy == molecule\n assert molecule_copy.n_conformers == molecule.n_conformers\n assert np.allclose(molecule_copy.conformers[0], molecule.conformers[0])", "def test_json_serialization(self, molecule):\n # TODO: Test round-trip, on mini_drug_bank, when to_json bug is fixed, see #547\n mol = Molecule.from_smiles(\"CCO\")\n molecule_copy = Molecule.from_json(mol.to_json())\n assert molecule_copy == mol\n mol.generate_conformers(n_conformers=1)\n with pytest.raises(TypeError):\n mol.to_json()", "def test_json():\n smiles = Smiles(\"c1(C=O)cc(OC)c(O)cc1\")\n copy = loads(dumps(smiles))\n assert(copy == smiles)", "def flatten_json_data(beer_json):\n if 'available' in beer_json:\n if 'name' in beer_json['available']:\n beer_json['available_name'] = beer_json['available']['name']\n del beer_json['available']\n if 'glass' in beer_json:\n if 'name' in beer_json['glass']:\n beer_json['glass_name'] = beer_json['glass']['name']\n del beer_json['glass']\n if 'style' in beer_json:\n if 'abvMax' in beer_json['style']:\n beer_json['style_abvMax'] = beer_json['style']['abvMax']\n if 'abvMin' in beer_json['style']:\n beer_json['style_abvMin'] = beer_json['style']['abvMin']\n if 'category' in beer_json['style']:\n beer_json['style_category_name'] = beer_json['style']['category']['name']\n if 'description' in beer_json['style']:\n beer_json['style_description'] = beer_json['style']['description']\n if 'fgMax' in beer_json['style']:\n beer_json['style_fgMax'] = beer_json['style']['fgMax']\n if 'fgMin' in beer_json['style']:\n beer_json['style_fgMin'] = beer_json['style']['fgMin']\n if 'ibuMax' in beer_json['style']:\n beer_json['style_ibuMax'] = beer_json['style']['ibuMax']\n if 'ibuMin' in beer_json['style']:\n beer_json['style_ibuMin'] = beer_json['style']['ibuMin']\n if 'name' in beer_json['style']:\n beer_json['style_name'] = beer_json['style']['name']\n if 'ogMin' in beer_json['style']:\n beer_json['style_ogMin'] = beer_json['style']['ogMin']\n if 'ogMax' in beer_json['style']:\n beer_json['style_ogMax'] = beer_json['style']['ogMax']\n if 'shortName' in beer_json['style']:\n beer_json['style_shortName'] = beer_json['style']['shortName']\n if 'srmMax' in beer_json['style']:\n beer_json['style_srmMax'] = beer_json['style']['srmMax']\n if 'srmMin' in beer_json['style']:\n beer_json['style_srmMin'] = beer_json['style']['srmMin']\n del (beer_json['style'])\n else:\n beer_json = None\n\n return beer_json", "def json_friendly(self):", "def test_to_from_json():\n matg = matgame.matgame([[[1, 2], [3, 4]], [[5, 6], [7, 8]], [[9, 10], [11, 12]]])\n mjson = {\n \"players\": 
{\"r0\": 1, \"r1\": 1},\n \"strategies\": {\"r0\": [\"s0\", \"s1\", \"s2\"], \"r1\": [\"s3\", \"s4\"]},\n \"payoffs\": {\n \"s0\": {\"s3\": {\"r0\": 1, \"r1\": 2}, \"s4\": {\"r0\": 3, \"r1\": 4}},\n \"s1\": {\"s3\": {\"r0\": 5, \"r1\": 6}, \"s4\": {\"r0\": 7, \"r1\": 8}},\n \"s2\": {\"s3\": {\"r0\": 9, \"r1\": 10}, \"s4\": {\"r0\": 11, \"r1\": 12}},\n },\n \"type\": \"matrix.1\",\n }\n assert matg.to_json() == mjson\n assert json.loads(json.dumps(matg.to_json())) == mjson\n assert matg == matgame.matgame_json(mjson)", "def validate_json(self):\n pass", "def test_json_numpy_roundtrips(self):\n mol = Molecule.from_smiles(\"CCO\")\n mol.generate_conformers(n_conformers=1)\n initial_conformer = mol.conformers[0]\n\n for _ in range(10):\n mol = Molecule.from_json(mol.to_json())\n\n assert np.allclose(initial_conformer, mol.conformers[0])", "def test_list_int(self):\n json_data = json.dumps([{\"Gender\": \"Male\", \"HeightCm\": 171, \"WeightKg\": 96},\n {\"Gender\": \"Male\", \"HeightCm\": 161, \"WeightKg\": 85},\n {\"Gender\": \"Male\", \"HeightCm\": 180, \"WeightKg\": 77},\n {\"Gender\": \"Female\", \"HeightCm\": 166, \"WeightKg\": 62},\n {\"Gender\": \"Female\", \"HeightCm\": 150, \"WeightKg\": 70},\n {\"Gender\": \"Female\", \"HeightCm\": 167, \"WeightKg\": 82}])\n\n expected_result = json.dumps({\n \"updatedJson\": [\n {\"Gender\": \"Male\", \"HeightCm\": 171.0, \"WeightKg\": 96.0, \"BMI\": 32.8,\n \"BMICategory\": \"Moderately obese\", \"HealthRisk\": \"Medium risk\"},\n {\"Gender\": \"Male\", \"HeightCm\": 161.0, \"WeightKg\": 85.0, \"BMI\": 32.8,\n \"BMICategory\": \"Moderately obese\", \"HealthRisk\": \"Medium risk\"},\n {\"Gender\": \"Male\", \"HeightCm\": 180.0, \"WeightKg\": 77.0, \"BMI\": 23.8,\n \"BMICategory\": \"Normal weight\", \"HealthRisk\": \"Low risk\"},\n {\"Gender\": \"Female\", \"HeightCm\": 166.0, \"WeightKg\": 62.0, \"BMI\": 22.5,\n \"BMICategory\": \"Normal weight\", \"HealthRisk\": \"Low risk\"},\n {\"Gender\": \"Female\", \"HeightCm\": 150.0, \"WeightKg\": 70.0, \"BMI\": 31.1,\n \"BMICategory\": \"Moderately obese\", \"HealthRisk\": \"Medium risk\"},\n {\"Gender\": \"Female\", \"HeightCm\": 167.0, \"WeightKg\": 82.0, \"BMI\": 29.4,\n \"BMICategory\": \"Overweight\", \"HealthRisk\": \"Enhanced risk\"}\n ],\n \"Underweight count\": 0,\n \"Normal weight count\": 2,\n \"Overweight count\": 1,\n \"Moderately obese count\": 3,\n \"Severely obese count\": 0,\n \"Very severely obese count\": 0\n })\n\n result = process_person_json(json_data)\n self.assertEqual(result, expected_result)", "def minimal_json42():\n return {\n 'identifiers': [{\n 'identifierType': 'DOI',\n 'identifier': '10.1234/foo.bar',\n }],\n 'creators': [\n {'name': 'Nielsen, Lars Holm'},\n ],\n 'titles': [\n {'title': 'Minimal Test Case'}\n ],\n 'publisher': 'Invenio Software',\n 'publicationYear': '2016',\n 'types': {\n 'resourceType': '',\n 'resourceTypeGeneral': 'Software'\n },\n 'schemaVersion': 'http://datacite.org/schema/kernel-4'\n }", "def test_embedded_json(self):\n json_data = '{\"a\": {\"b\" : true } }'\n json_flattened = json_flatten(json_data)\n self.assertEqual(json.loads(json_flattened), json.loads('{\"a.b\" : true}'))", "def test_base_case_json(self):\n json_data = '{\"a\": 1}'\n json_flattened = json_flatten(json_data)\n self.assertEqual(json.loads(json_flattened), json.loads('{\"a\" : 1}'))", "def test_empty_json(self):\n json_data = '{ }'\n json_flattened = json_flatten(json_data)\n self.assertEqual(json.loads(json_flattened), json.loads('{ }'))", "def test_json_to_python(self):\n\n # 
There seems to be a problem with Flask-Login setting the current_user proxy\n # in api/models.py, which we need t run this test.\n if False:\n self.login_test_user()\n\n location = {\n 'address' : '123 Main St.',\n 'lat' : '127.0', # forgive numbers coming as strings\n 'lng' : -42,\n 'name' : 'nowhere',\n 'id' : str(ObjectId())\n }\n\n expanded = Location.from_json(location)\n\n # these should all be the same\n self.assertEqual(expanded['address'], location['address'])\n self.assertEqual(expanded['lat'], location['lat'])\n self.assertEqual(expanded['lng'], location['lng'])\n self.assertEqual(expanded['name'], location['name'])\n\n # owner should be set by the currently logged in location\n self.assertEqual(expanded['owner'], self.test_location.id)\n\n # id should be renamed from id to _id, and expanded\n self.assertTrue(expanded.has_key('_id'))\n self.assertFalse(expanded.has_key('id'))\n self.assertEqual(str(expanded['_id']), location['id'])", "def test_example_json(self):\n json_data = '{ \"a\": 1, \"b\": true, \"c\": { \"d\": 3, \"e\": \"test\" } }'\n json_flattened = json_flatten(json_data)\n self.assertEqual(json.loads(json_flattened),\n json.loads('{ \"a\": 1, \"b\": true, \"c.d\": 3, \"c.e\": \"test\" }'))", "def test_random_to_from_json(strats):\n payoffs = rand.random(tuple(strats) + (len(strats),))\n matg = matgame.matgame(payoffs)\n jgame = json.dumps(matg.to_json())\n copy = matgame.matgame_json(json.loads(jgame))\n assert matg == copy", "def test_json_round_trip():\n j = '[{\"name\": \"Alice\", \"occupation\": \"researcher\"}]'\n s = Store(\"Azure\")\n cell_hash = str(uuid.uuid4())\n frame_name = str(uuid.uuid4())\n s.write(j, cell_hash, frame_name)\n result = s.read(cell_hash, frame_name).decode(\"utf-8\")\n assert(result == j)", "def is_valid_json(j):\n try:\n json.dumps(j)\n return True\n except json.JSONDecodeError:\n print(\"not valid json\")\n return False", "def test_read_json_compressed():\n s = JsonSource()\n g = s.parse(os.path.join(RESOURCE_DIR, 'valid.json.gz'), compression='gz')\n nodes = {}\n edges = {}\n for rec in g:\n if rec:\n if len(rec) == 4:\n edges[(rec[0], rec[1])] = rec[3]\n else:\n nodes[rec[0]] = rec[1]\n\n assert len(nodes.keys()) == 6\n assert len(edges.keys()) == 5\n\n n = nodes['MONDO:0017148']\n assert 'id' in n and n['id'] == 'MONDO:0017148'\n assert n['name'] == 'heritable pulmonary arterial hypertension'\n assert n['category'][0] == 'biolink:Disease'\n\n e = edges[('HGNC:11603', 'MONDO:0017148')]\n assert e['subject'] == 'HGNC:11603'\n assert e['object'] == 'MONDO:0017148'\n assert e['predicate'] == 'biolink:related_to'\n assert e['relation'] == 'RO:0004013'", "def json(self) -> Dict[str, Union[List, Dict, str, int, float]]:", "def test_from_to_json_stat_no_loads(self):\n\n results = pyjstat.from_json_stat(self.oecd_datasets)\n json_data = json.loads(pyjstat.to_json_stat(results),\n object_pairs_hook=OrderedDict)\n data_df = pyjstat.from_json_stat(json_data)\n line_thirty = ['Unemployment rate', 'Belgium', 2009, 7.891892855]\n dimensions = pyjstat.get_dimensions(self.oecd_datasets['oecd'],\n 'label')\n self.assertTrue(len(data_df) == 2)\n self.assertTrue(set(data_df[0].columns.values[:-1]) ==\n set(dimensions[1]))\n self.assertTrue(set(data_df[0].iloc[30].values) ==\n set(line_thirty))", "def minimal_json43():\n return {\n 'identifiers': [{\n 'identifierType': 'DOI',\n 'identifier': '10.1234/foo.bar',\n }],\n 'creators': [\n {'name': 'Nielsen, Lars Holm'},\n ],\n 'titles': [\n {'title': 'Minimal Test Case'}\n ],\n 'publisher': 'Invenio 
Software',\n 'publicationYear': '2016',\n 'types': {\n 'resourceType': '',\n 'resourceTypeGeneral': 'Software'\n },\n 'schemaVersion': 'http://datacite.org/schema/kernel-4'\n }", "def test_from_to_json_stat_as_dict(self):\n\n results = pyjstat.from_json_stat(self.oecd_datasets)\n json_data = json.loads(pyjstat.to_json_stat(results, output='dict'),\n object_pairs_hook=OrderedDict)\n data_df = pyjstat.from_json_stat(json.loads(json.dumps(json_data),\n object_pairs_hook=\n OrderedDict))\n line_thirty = ['Unemployment rate', 'Belgium', 2009, 7.891892855]\n dimensions = pyjstat.get_dimensions(self.oecd_datasets['oecd'],\n 'label')\n self.assertTrue(len(data_df) == 2)\n self.assertTrue(set(data_df[0].columns.values[:-1]) ==\n set(dimensions[1]))\n self.assertTrue(set(data_df[0].iloc[30].values) ==\n set(line_thirty))", "def jsonify_01(data):\n if isinstance(data,dict):\n serialized_summary= dict()\n for key,value in data.items():\n if isinstance(value, list):\n value = [jsonify_01(item) for item in value]\n elif isinstance(value, list):\n value = jsonify_01(value)\n elif type(value).__module__=='numpy':\n value=value.tolist()\n else:\n if isinstance(value, dict):\n for key2,value2 in value.items():\n value[key2]=jsonify_01(value2)\n if isinstance(value,scipy.sparse.coo.coo_matrix):\n value=\"not serializable\"\n serialized_summary[key]=value\n elif type(data).__module__=='numpy':\n serialized_summary=data.tolist()\n else:\n serialized_summary=data\n return serialized_summary", "def test_non_list_of_dicts_arg(self):\n self.assertEqual(self.obj.to_json_string(666), '666')", "def test_to_json(self):\n r = self.SEQ(\"AAGGCC\", name=\"seq1\")\n got = json.loads(r.to_json())\n expect = {\n \"name\": \"seq1\",\n \"seq\": \"AAGGCC\",\n \"moltype\": r.moltype.label,\n \"info\": None,\n \"type\": get_object_provenance(r),\n \"version\": __version__,\n }\n self.assertEqual(got, expect)" ]
[ "0.67845356", "0.64108104", "0.62416023", "0.56160873", "0.5391421", "0.5343686", "0.5216057", "0.5167628", "0.515412", "0.5050066", "0.5041063", "0.5009137", "0.49782172", "0.49490848", "0.49448678", "0.4934982", "0.49224684", "0.4890417", "0.4880887", "0.48737907", "0.48529235", "0.48432943", "0.48413438", "0.4824318", "0.4820835", "0.4818306", "0.47953764", "0.47916627", "0.47857004", "0.47854" ]
0.66377836
1
Returns a json interpretation of a grow population object
def to_json(self): return ["population", self.species_index, self.card_trade_index]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_grow_population(json_grow):\n if GrowPopulation.is_valid_pop_grow(json_grow):\n return GrowPopulation(json_grow[1], json_grow[2])\n else:\n raise Exception(\"Invalid json population grow action\")", "def as_json(self):\n # if we don't convert it to a dict we'll get a whole bunch of 'can't be serialized' things\n # match = self.__dict__\n # match.pop('_sa_instance_state', None)\n # for k in match:\n #\n # match['date'] = match['date'].isoformat()\n m = self.__dict__\n m['explosions'] = self.explosions.all()\n m['deaths'] = self.deaths.all()\n m['antagobjs'] = self.antagobjs.all()\n m['uplinkbuys'] = self.uplinkbuys.all()\n m['badassbuys'] = self.badassbuy.all()\n m['populationstats'] = self.populationstats.all()\n\n return dict_to_json(m)", "def parse_proxy_grow_population(json_grow):\n if GrowPopulation.is_valid_proxy_grow(json_grow):\n return GrowPopulation(json_grow[0], json_grow[1])\n else:\n raise Exception(\"Invalid json population grow action\")", "def json(self):\r\n return {\"id\": self.id, \"code\": self.code, \"description\": self.description, \"xCoor\": self.x_coor, \"yCoor\": self.y_coor, \"latitude\": self.latitude,\r\n \"longitude\": self.longitude, \"waterschapId\": self.waterschap_id, \"watertypeId\": self.watertype_id, \"watertypeKrwId\": self.watertype_krw_id}", "def as_json(self):", "def get_json(self):\n return {\n \"power\": self.get_power(), \n \"timestamp\": self.get_timestamp(), \n \"shortage\": self.get_shortage()\n }", "def serialize_pop(population):\n return [ agent.get_weights() for agent in population ]", "def get_json(self):\n json_item = {\"id: \": self.id,\n \"question: \": self.question,\n \"documents: \": self.documents,\n \"document_ids: \": self.document_ids,\n \"gold answers: \": self.gold}\n return json_item", "def json(self):\n return {'name': self.neighbourhood_group, 'neighbourhood': self.room_type}", "def as_json(self) -> str:\n return json.dumps(self, cls=_ProgrammeJSONEncoder)", "def dict(self):\n\t\treturn self.json", "def as_json(self):\n result = super().as_json()\n result[\"generator\"].update({\n \"block\": self.vein.with_purity(100).as_json(),\n \"cluster-size\": self.cluster_size,\n \"type\": \"cluster\",\n })\n return result", "def read_population_distribution(path_POP, occupancy, verbose=False):\n with open(path_POP, 'r') as f:\n jd = json.load(f)\n\n data = jd[occupancy]\n\n # convert peak population to persons/m2\n data['peak'] = data['peak'] / (1000. * ft2)\n\n if verbose:\n pp.pprint(data)\n\n return data", "def to_json(self):\n pass", "def json_friendly(self):", "def json(self):\n return {'User_uuid': self.uuid, 'School_id': self.school_id, 'Earned_points': self.us_dollar}", "def population_information(self):\n outstr = [\"Primary population: {}\\n\".format(self.population.population),\n \"Individuals: {}\\n\".format(self.total),\n \"Discovery populations: {}; Total: {}\\n\".format(\n self.disc_pops.to_simple_str(), self.total_disc\n )]\n for pop in ['european', 'african', 'east_asian',\n 'south_asian', 'hispanic', 'native',\n 'micronesian', 'arab', 'unspecified',\n 'filipino', 'indonesian']:\n outstr.append('\\t{}: {}\\n'.format(pop, eval('self.' 
+ pop)))\n\n outstr.append(\"Replication populations: {}; Total: {}\\n\".format(\n self.rep_pops.to_simple_str(), self.total_rep\n ))\n for pop in ['european', 'african', 'east_asian',\n 'south_asian', 'hispanic', 'native',\n 'micronesian', 'arab', 'unspecified',\n 'filipino', 'indonesian']:\n outstr.append('\\t{}: {}\\n'.format(pop, eval('self.rep_' + pop)))", "def json_data(self):\n self.check_proof()\n return {\n \"vars\": [{'name': v.name, 'T': str(v.T)} for v in self.vars],\n \"proof\": sum([printer.export_proof_item(self.thy, item, unicode=True, highlight=True)\n for item in self.prf.items], []),\n \"report\": self.rpt.json_data(),\n \"method_sig\": self.get_method_sig()\n }", "def get_person_like_json(self):\n return json.dumps(self.get_person())", "def getJSON(self):\n text = super().getJSON() + f', \"exchange\": \"{self.__exchange}\"'\n text += f', \"market pair\": \"{self.__market_pairs}\"'\n text += f', \"interval\": \"{self.__interval}\"}}'\n return text", "def as_json(self):\n\n return {\n \"name\": self.name,\n \"summary\": self.summary.as_json(),\n \"cases\": [case.as_json() for case in self.cases]\n }", "def json(self):\n class ExtendedJSONEncoder(json.JSONEncoder):\n def default(self, obj):\n if isinstance(obj, datetime.date) or isinstance(obj, datetime.time):\n encoded_object = obj.isoformat()\n else:\n encoded_object = json.JSONEncoder.default(self, obj)\n return encoded_object\n\n obj = {\n 'operation': self.operation,\n 'version': self.version,\n 'language': self.language,\n 'identifiers': self.identifiers,\n 'store_execute': self.store_execute,\n 'status': self.status,\n 'lineage': self.lineage,\n 'inputs': dict((i, [inpt.json for inpt in self.inputs[i]]) for i in self.inputs),\n 'outputs': self.outputs,\n 'raw': self.raw\n }\n\n return json.dumps(obj, allow_nan=False, cls=ExtendedJSONEncoder)", "def json(self, update=False):\n return json.dumps(self.export(update=update), indent=4)", "def json(self):\n robot_dict = self.robot_dict()\n target_dict = self.target_dict()\n json_str = '{'\n json_str = json_str + '\"robot_obj\" : ' + json.dumps(robot_dict) + \",\\n\"\n json_str = json_str + '\"target_obj\" : ' + json.dumps(target_dict) + \"\\n\"\n json_str = json_str + '}'\n return(json_str)", "def getPopulation(self):\n\n return self.p", "def GetJSON(self):\n return json.dumps(self.GetDict())", "def to_json(self):\n return json.dumps(self.for_json())", "def serialize(self):\n\n return {\n 'experience': list(self.experience),\n 'iteration': self.iteration,\n 'action_requests': self.action_requests\n }", "def json(self):\n return {\n \"qualified_name\": self.qualified_name,\n \"description\": self.description,\n \"data\": self.data,\n }", "def json(self):\n return {'id': self.id, 'name': self.name, 'description': self.description}" ]
[ "0.6523305", "0.6272748", "0.6193445", "0.6013572", "0.60070646", "0.600554", "0.58979523", "0.5883446", "0.5804956", "0.57915723", "0.577272", "0.5772612", "0.57659566", "0.5764919", "0.56975114", "0.5694334", "0.5686872", "0.5675293", "0.5651329", "0.5649477", "0.5640382", "0.5639673", "0.5629475", "0.5628962", "0.561413", "0.56075037", "0.55974233", "0.5593722", "0.55710053", "0.55676126" ]
0.7153903
0
Use svg formats to display things plotted
def use_svg_display(): display.set_matplotlib_formats('svg')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def use_svg_display(): #@save\n display.set_matplotlib_formats('svg')", "def _repr_svg_(self):\n pass", "def _repr_svg_(self):\n if not IPythonConsole.ipython_useSVG:\n return None\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoSVG(\n mol, size, self.aix, \"\", keku, drawOptions=opts, highlightBonds=self.bix\n )", "def _repr_svg_(self):\n if not IPythonConsole.ipython_useSVG:\n return None\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoSVG(\n mol, size, self.aix, \"\", keku, drawOptions=opts, highlightBonds=self.bix\n )", "def svg(self, scale_factor=..., color=...): # -> str:\n ...", "def __merger_svg(self):\n pass", "def show_svg(tmp_path = DEFAULT_PATH): \n global show_counter\n file_name = tmp_path + \"show_tmp_file_{}.svg\".format(show_counter)\n plt.savefig(file_name)\n os.system(\"open {}\".format(file_name))\n show_counter += 1\n plt.close()", "def to_svg(self, outfile, scaling, precision, attributes):\n outfile.write('<g id=\"')\n outfile.write(self.name.replace(\"#\", \"_\"))\n outfile.write('\" ')\n outfile.write(attributes)\n outfile.write(\">\\n\")\n for polygon in self.polygons:\n polygon.to_svg(outfile, scaling, precision)\n for path in self.paths:\n path.to_svg(outfile, scaling, precision)\n for label in self.labels:\n label.to_svg(outfile, scaling, precision)\n for reference in self.references:\n reference.to_svg(outfile, scaling, precision)\n outfile.write(\"</g>\\n\")", "def _repr_svg_(self):\n try:\n return self.mol._repr_svg_()\n except AttributeError:\n return None", "def save_plot(p, file_name, path='../static/images/'):\n p.output_backend = \"svg\"\n export_svgs(p, filename=path + file_name + '.svg')", "def draw(self, stats=[]):\n clear_output(wait=True)\n svg_html = self.to_html(stats)\n display(svg_html)", "def plot_svg(Sn, fig=None, ax=None, **kwargs):\n fig, ax = fig_ax_getter(fig, ax)\n ax.semilogy(Sn/np.sum(Sn), 'sk', markersize=6)\n ax.set_xlabel('Model order')\n ax.set_ylabel('Normalized magnitude')\n return fig, ax", "def simplestExample():\n\n my_svg = drawSVG.SVG()\n return my_svg", "def wrap_in_html(self,svgofmodel):\n html= '''<html>\\n%s\\n%s\\n%s\\n</g></g></g></svg></body></html>\\n'''\n svgbody= '''<body onload=\"javascript:setTimeout(&quot;location.reload(true);&quot;,%d);\">\\n''' % self.vrefreshms\n svgbody += \"<h4>GeoGad</h4>\"\n svghead= '<svg xmlns=\"http://www.w3.org/2000/svg\" version=\"1.2\" baseProfile=\"tiny\" width=\"%dpx\" height=\"%dpx\">\\n'\n svghead= svghead % (self.vboxX,self.vboxY)\n svghead+= '<rect x=\"1\" y=\"1\" width=\"%d\" height=\"%d\" fill=\"none\" stroke=\"blue\" stroke-width=\"4\"/>\\n'% (self.vboxX,self.vboxY)\n svghead+= '<g fill=\"none\" stroke=\"black\" stroke-width=\"%0.2f\">\\n' % self.vlinewidth\n svghead+= '<g transform=\"scale(%0.2f,%0.2f)\">\\n' % (self.vscaleX,self.vscaleY)\n svghead+= '<g transform=\"translate(%0.2f,%0.2f)\">\\n' % (self.vtranX,self.vtranY)\n return html % (svgbody,svghead,svgofmodel)", "def output_svg(lines, regressions, requested_width, requested_height):\n \n (global_min_x, _), (global_max_x, global_max_y) = bounds(lines)\n max_up_slope, min_down_slope = bounds_slope(regressions)\n \n #output\n global_min_y = 0\n x = global_min_x\n y = global_min_y\n w = global_max_x - global_min_x\n h = global_max_y - global_min_y\n font_size = 16\n line_width = 2\n \n pic_width, pic_height = 
compute_size(requested_width, requested_height\n , w, h)\n \n def cw(w1):\n \"\"\"Converts a revision difference to display width.\"\"\"\n return (pic_width / float(w)) * w1\n def cx(x):\n \"\"\"Converts a revision to a horizontal display position.\"\"\"\n return cw(x - global_min_x)\n\n def ch(h1):\n \"\"\"Converts a time difference to a display height.\"\"\"\n return -(pic_height / float(h)) * h1\n def cy(y):\n \"\"\"Converts a time to a vertical display position.\"\"\"\n return pic_height + ch(y - global_min_y)\n \n print '<!--Picture height %.2f corresponds to bench value %.2f.-->' % (\n pic_height, h)\n print '<svg',\n print 'width=%s' % qa(str(pic_width)+'px')\n print 'height=%s' % qa(str(pic_height)+'px')\n print 'viewBox=\"0 0 %s %s\"' % (str(pic_width), str(pic_height))\n print 'onclick=%s' % qa(\n \"var event = arguments[0] || window.event;\"\n \" if (event.shiftKey) { highlightRevision(null); }\"\n \" if (event.ctrlKey) { highlight(null); }\"\n \" return false;\")\n print 'xmlns=\"http://www.w3.org/2000/svg\"'\n print 'xmlns:xlink=\"http://www.w3.org/1999/xlink\">'\n \n print \"\"\"\n<defs>\n <marker id=\"circleMark\"\n viewBox=\"0 0 2 2\" refX=\"1\" refY=\"1\"\n markerUnits=\"strokeWidth\"\n markerWidth=\"2\" markerHeight=\"2\"\n orient=\"0\">\n <circle cx=\"1\" cy=\"1\" r=\"1\"/>\n </marker>\n</defs>\"\"\"\n \n #output the revisions\n print \"\"\"\n<script type=\"text/javascript\">//<![CDATA[\n var previousRevision;\n var previousRevisionFill;\n var previousRevisionStroke\n function highlightRevision(id) {\n if (previousRevision == id) return;\n\n document.getElementById('revision').firstChild.nodeValue = 'r' + id;\n document.getElementById('rev_link').setAttribute('xlink:href',\n 'http://code.google.com/p/skia/source/detail?r=' + id);\n \n var preRevision = document.getElementById(previousRevision);\n if (preRevision) {\n preRevision.setAttributeNS(null,'fill', previousRevisionFill);\n preRevision.setAttributeNS(null,'stroke', previousRevisionStroke);\n }\n \n var revision = document.getElementById(id);\n previousRevision = id;\n if (revision) {\n previousRevisionFill = revision.getAttributeNS(null,'fill');\n revision.setAttributeNS(null,'fill','rgb(100%, 95%, 95%)');\n \n previousRevisionStroke = revision.getAttributeNS(null,'stroke');\n revision.setAttributeNS(null,'stroke','rgb(100%, 90%, 90%)');\n }\n }\n//]]></script>\"\"\"\n \n def print_rect(x, y, w, h, revision):\n \"\"\"Outputs a revision rectangle in display space,\n taking arguments in revision space.\"\"\"\n disp_y = cy(y)\n disp_h = ch(h)\n if disp_h < 0:\n disp_y += disp_h\n disp_h = -disp_h\n \n print '<rect id=%s x=%s y=%s' % (qa(revision), qa(cx(x)), qa(disp_y),),\n print 'width=%s height=%s' % (qa(cw(w)), qa(disp_h),),\n print 'fill=\"white\"',\n print 'stroke=\"rgb(98%%,98%%,88%%)\" stroke-width=%s' % qa(line_width),\n print 'onmouseover=%s' % qa(\n \"var event = arguments[0] || window.event;\"\n \" if (event.shiftKey) {\"\n \" highlightRevision('\"+str(revision)+\"');\"\n \" return false;\"\n \" }\"),\n print ' />'\n \n xes = set()\n for line in lines.itervalues():\n for point in line:\n xes.add(point[0])\n revisions = list(xes)\n revisions.sort()\n \n left = x\n current_revision = revisions[0]\n for next_revision in revisions[1:]:\n width = (((next_revision - current_revision) / 2.0)\n + (current_revision - left))\n print_rect(left, y, width, h, current_revision)\n left += width\n current_revision = next_revision\n print_rect(left, y, x+w - left, h, current_revision)\n\n #output the lines\n print 
\"\"\"\n<script type=\"text/javascript\">//<![CDATA[\n var previous;\n var previousColor;\n var previousOpacity;\n function highlight(id) {\n if (previous == id) return;\n\n document.getElementById('label').firstChild.nodeValue = id;\n\n var preGroup = document.getElementById(previous);\n if (preGroup) {\n var preLine = document.getElementById(previous+'_line');\n preLine.setAttributeNS(null,'stroke', previousColor);\n preLine.setAttributeNS(null,'opacity', previousOpacity);\n\n var preSlope = document.getElementById(previous+'_linear');\n if (preSlope) {\n preSlope.setAttributeNS(null,'visibility', 'hidden');\n }\n }\n\n var group = document.getElementById(id);\n previous = id;\n if (group) {\n group.parentNode.appendChild(group);\n \n var line = document.getElementById(id+'_line');\n previousColor = line.getAttributeNS(null,'stroke');\n previousOpacity = line.getAttributeNS(null,'opacity');\n line.setAttributeNS(null,'stroke', 'blue');\n line.setAttributeNS(null,'opacity', '1');\n \n var slope = document.getElementById(id+'_linear');\n if (slope) {\n slope.setAttributeNS(null,'visibility', 'visible');\n }\n }\n }\n//]]></script>\"\"\"\n for label, line in lines.items():\n print '<g id=%s>' % qa(label)\n r = 128\n g = 128\n b = 128\n a = .10\n if label in regressions:\n regression = regressions[label]\n min_slope = regression.find_min_slope()\n if min_slope < 0:\n d = max(0, (min_slope / min_down_slope))\n g += int(d*128)\n a += d*0.9\n elif min_slope > 0:\n d = max(0, (min_slope / max_up_slope))\n r += int(d*128)\n a += d*0.9\n \n slope = regression.slope\n intercept = regression.intercept\n min_x = regression.min_x\n max_x = regression.max_x\n print '<polyline id=%s' % qa(str(label)+'_linear'),\n print 'fill=\"none\" stroke=\"yellow\"',\n print 'stroke-width=%s' % qa(abs(ch(regression.serror*2))),\n print 'opacity=\"0.5\" pointer-events=\"none\" visibility=\"hidden\"',\n print 'points=\"',\n print '%s,%s' % (str(cx(min_x)), str(cy(slope*min_x + intercept))),\n print '%s,%s' % (str(cx(max_x)), str(cy(slope*max_x + intercept))),\n print '\"/>'\n \n print '<polyline id=%s' % qa(str(label)+'_line'),\n print 'onmouseover=%s' % qa(\n \"var event = arguments[0] || window.event;\"\n \" if (event.ctrlKey) {\"\n \" highlight('\"+str(label).replace(\"'\", \"\\\\'\")+\"');\"\n \" return false;\"\n \" }\"),\n print 'fill=\"none\" stroke=\"rgb(%s,%s,%s)\"' % (str(r), str(g), str(b)),\n print 'stroke-width=%s' % qa(line_width),\n print 'opacity=%s' % qa(a),\n print 'points=\"',\n for point in line:\n print '%s,%s' % (str(cx(point[0])), str(cy(point[1]))),\n print '\"/>'\n\n print '</g>'\n\n #output the labels\n print '<text id=\"label\" x=\"0\" y=%s' % qa(font_size),\n print 'font-size=%s> </text>' % qa(font_size)\n\n print '<a id=\"rev_link\" xlink:href=\"\" target=\"_top\">'\n print '<text id=\"revision\" x=\"0\" y=%s style=\"' % qa(font_size*2)\n print 'font-size: %s; ' % qe(font_size)\n print 'stroke: #0000dd; text-decoration: underline; '\n print '\"> </text></a>'\n\n print '</svg>'", "def send_svg():\n state = request.get_json()\n path = os.path.dirname(__file__).replace('core', 'resources/tmp')\n filename = path + \"/\" + now_date(str=True) + \"-roast.png\"\n cairosvg.svg2png(bytestring=state['svg'], write_to=filename)\n return jsonify({'success': True})", "def svg(self, scale_factor=1., stroke_color=None, opacity=None):\n if self.is_empty:\n return '<g />'\n if stroke_color is None:\n stroke_color = \"#66cc99\" if self.is_valid else \"#ff3333\"\n return '<g>' + \\\n ''.join(p.svg(scale_factor, 
stroke_color, opacity) for p in self.geoms) + \\\n '</g>'", "def to_svg(self, separate=False, include_junctions=False):\n serialize_as_svg(self.output, separate, include_junctions)", "def render_svg(svg):\n b64 = base64.b64encode(svg.encode('utf-8')).decode(\"utf-8\")\n html = r'<img src=\"data:image/svg+xml;base64,%s\"/>' % b64\n st.write(html, unsafe_allow_html=True)", "def _repr_html_(self):\n nb_ticks = 7\n delta_x = math.floor(self.width / (nb_ticks - 1))\n x_ticks = [(i) * delta_x for i in range(0, nb_ticks)]\n delta_val = delta_x * (self.vmax - self.vmin) / self.width\n val_ticks = [round(self.vmin + (i) * delta_val, 1) for i in range(0, nb_ticks)]\n\n return (\n f'<svg height=\"40\" width=\"{self.width}\">'\n + \"\".join(\n [\n (\n '<line x1=\"{i}\" y1=\"15\" x2=\"{i}\" '\n 'y2=\"27\" style=\"stroke:{color};stroke-width:2;\" />'\n ).format(\n i=i * 1,\n color=self.rgba_hex_str(\n self.vmin + (self.vmax - self.vmin) * i / (self.width - 1),\n ),\n )\n for i in range(self.width)\n ],\n )\n + '<text x=\"0\" y=\"38\" style=\"text-anchor:start; font-size:11px; font:Arial\">{}</text>'.format( # noqa\n self.vmin,\n )\n + \"\".join(\n [\n (\n '<text x=\"{}\" y=\"38\"; style=\"text-anchor:middle; font-size:11px; font:Arial\">{}</text>' # noqa\n ).format(x_ticks[i], val_ticks[i])\n for i in range(1, nb_ticks - 1)\n ],\n )\n + '<text x=\"{}\" y=\"38\" style=\"text-anchor:end; font-size:11px; font:Arial\">{}</text>'.format(\n self.width,\n self.vmax,\n )\n + '<text x=\"0\" y=\"12\" style=\"font-size:11px; font:Arial\">{}</text>'.format(\n self.caption,\n )\n + \"</svg>\"\n )", "def save_as_svg(file_name, path = DEFAULT_PATH):\n plt.ioff()\n plt.savefig(path + file_name + '.svg')\n plt.close()", "def generateSVG(self, scale=1, noScale=None):\n return self.formatEval(\n self.TEMPLATES[self.attrs['name']]['SVG'],\n self.attrs,\n scale = scale,\n noScale = noScale\n )", "def __make_svg(self):\n if not self._items:\n return None\n\n # define call back functions for node format, href, subgraph\n def fnc_node_format(n):\n if (n.type, n.output_name, n.task_name, n.shard_idx) in self._items:\n return self._items[(n.type, n.output_name, n.task_name, n.shard_idx)][0]\n else:\n return None\n\n def fnc_href(n):\n if (n.type, n.output_name, n.task_name, n.shard_idx) in self._items:\n return self._items[(n.type, n.output_name, n.task_name, n.shard_idx)][1]\n else:\n return None\n\n def fnc_subgraph(n):\n if (n.type, n.output_name, n.task_name, n.shard_idx) in self._items:\n return self._items[(n.type, n.output_name, n.task_name, n.shard_idx)][2]\n else:\n return None\n\n # convert to dot string\n dot_str = self._dag.to_dot(\n fnc_node_format=fnc_node_format,\n fnc_href=fnc_href,\n fnc_subgraph=fnc_subgraph,\n template=self._template_d,\n )\n\n with tempfile.TemporaryDirectory() as tmp_dir:\n # temporary dot, svg from graphviz.Source.render\n tmp_dot = os.path.join(tmp_dir, '_tmp_.dot')\n\n try:\n svg = Source(dot_str, format='svg').render(filename=tmp_dot)\n except (ExecutableNotFound, FileNotFoundError):\n logger.error(\n 'Importing graphviz failed. Task graph will not be available. '\n 'Check if you have installed graphviz correctly so that '\n '\"dot\" executable exists on your PATH. '\n '\"pip install graphviz\" does not install such \"dot\". '\n 'Use apt or system-level installer instead. '\n 'e.g. 
sudo apt-get install graphviz.'\n )\n return None\n\n # save to DOT\n uri_dot = os.path.join(\n self._out_dir,\n CrooHtmlReportTaskGraph.TASK_GRAPH_DOT.format(\n workflow_id=self._workflow_id\n ),\n )\n AutoURI(uri_dot).write(dot_str, no_lock=True)\n\n # save to SVG\n uri_svg = os.path.join(\n self._out_dir,\n CrooHtmlReportTaskGraph.TASK_GRAPH_SVG.format(\n workflow_id=self._workflow_id\n ),\n )\n svg_contents = AutoURI(svg).read()\n AutoURI(uri_svg).write(svg_contents, no_lock=True)\n\n return svg_contents", "def svg2png (fName, width=600, app=None, oFilename=\"\"):\n from PyQt5.QtSvg import QSvgRenderer\n from PyQt5.QtGui import QImage, QPainter, QColor, QGuiApplication\n from math import sqrt\n\n if not app:\n app=QGuiApplication([])\n svg, w, h = openSVG(fName)\n groups = svg.getElementsByTagName(\"g\")\n scale = width/w\n for g in groups:\n if \"stroke-width\" in g.attributes:\n g.setAttribute(\"stroke-width\", str(float(g.getAttribute(\"stroke-width\"))/sqrt(scale)))\n qsr=QSvgRenderer(svg.toxml().encode(\"utf-8\"))\n img=QImage(int(w*scale), int(h*scale), QImage.Format_ARGB32)\n img.fill(QColor(\"white\"))\n p=QPainter(img)\n qsr.render(p)\n p.end()\n if not oFilename:\n oFilename = re.sub(r\"\\.svg$\", f\"-{width}px.png\", fName)\n img.save(oFilename)\n return oFilename", "def graphRep(input_seq, bracket_str):\n proc = subprocess.Popen(['RNAplot', '-o', 'svg'], stdin=subprocess.PIPE,)\n input_str = input_seq + '\\n' + bracket_str;\n proc.communicate(input_str)\n\n file = open('rna.svg', 'r')\n graph_svg = markdown(file.read())\n return graph_svg", "def output_svg(self, string_to_output):\n self._output_object.add_report(string_to_output)", "def create_svg_name(self):\n for l in self.data:\n d = self.get_line_desc(l)\n self.svgouts[tuple(l)] = self.create_svg(d)", "def create_graphic(X):\n plt.close('all')\n plt.figure(figsize=(12,6))\n sns.set(style='darkgrid', palette='bright')\n for i,j in enumerate(X): \n plt.subplot(2, 3, (i+1))\n plt.text(X[j], 0, X[j], color='black')\n plt.axvline(x=X[j], linestyle='--', c='red')\n sns.distplot(data[j].dropna(), bins=30, kde=False)\n plt.tight_layout()\n img = io.BytesIO()\n plt.savefig(img, format='png')\n img.seek(0)\n graph_url = base64.b64encode(img.getvalue()).decode()\n graph = 'data:image/png;base64,{}'.format(graph_url)\n return graph", "def init_svg(self):\n self.svg = self.doc.createElement('svg')\n halfwidth = self.radius+self.daytick_space+self.daytick_monthsize+\\\n self.padding\n dimension = 2*halfwidth\n attr = {'xmlns':'http://www.w3.org/2000/svg', 'version':'1.1',\n 'xmlns:xlink':'http://www.w3.org/1999/xlink',\n 'viewBox':'0 0 %d %d'%(dimension,dimension),\n 'height':'%din'%self.inches, 'width':'%din'%self.inches, \n 'preserveAspectRatio':'xMinYMid meet',\n 'stroke':'black', 'fill':'none',\n 'font-family':'Arial', 'font-size':10}\n for k,v in attr.items(): self.svg.setAttribute(k,conv(v))\n # Create the clipping path for the interior region of the chart.\n self.defs = self.make_element(self.svg, 'defs')\n clip = self.make_element(\n self.defs, 'clipPath', ('id', 'innerClipPath'))\n self.make_element(\n clip, 'circle', ('cx',0), ('cy',0), ('r',self.radius))\n # Make 0,0 the center of the circle.\n self.centered = self.doc.createElement('g')\n self.centered.setAttribute('transform','translate(%d,%d)'%(\n 2*(halfwidth,)))\n self.svg.appendChild(self.centered)", "def svg(self) -> str:\n data = {\n 'x': self.x,\n 'y': self.y,\n 'width': self.width,\n 'height': self.height,\n 'text_x': self.x + 30,\n 'text_y': self.y + 20,\n 
'name': self.person.name\n }\n return PERSON_BOX_TEMPLATE.format(**data)" ]
[ "0.8240741", "0.80497175", "0.7145356", "0.7145356", "0.7055136", "0.68867075", "0.6879458", "0.6758306", "0.6727566", "0.6718365", "0.6621455", "0.6587107", "0.6476702", "0.64378935", "0.6431482", "0.6402853", "0.63732785", "0.62872374", "0.6284717", "0.6247871", "0.62346727", "0.6226391", "0.62215966", "0.62146103", "0.62141585", "0.620827", "0.62074023", "0.61792654", "0.6169869", "0.6168585" ]
0.80888575
1
Set the plot view size to figsize
def set_figsize(figsize=(3.5, 2.5)): use_svg_display() plt.rcParams['figure.figsize'] = figsize
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_figsize(figsize=(3.5, 2.5)): #@save\n use_svg_display()\n d2l.plt.rcParams['figure.figsize'] = figsize", "def figure(figsize=None,xSize=3.5,ySize=3.5,dpi=600,kw_setup=dict(),**kw):\n plot_setup(**kw_setup)\n if (figsize is not None):\n xSize = figsize[0]\n ySize = figsize[1]\n return plt.figure(figsize=(xSize,ySize),dpi=dpi,**kw)", "def setAxisPageSize(width,height):\n dislin.axslen(width,height)", "def __init__(self):\n\n fig_width_pt = 800.0 \n pylab.rcParams.update(plot_params)", "def set_figure_size(self):\n lims, _ = self.set_lims()\n size_fac = 50\n paperSizeFac = 0.65\n one_dec = 1.6\n xdecs = np.log10(lims(1)) - np.log10(lims(0))\n one_dec = one_dec * 4 / xdecs\n ydecs = np.log10(lims[3]) - np.log10(lims[2])\n paper_width = xdecs * one_dec\n paper_height = (ydecs + 3) * one_dec\n paper_height = min([paper_height, 9])\n rectScreen = [0.5, 0.5, paper_width, paper_height] * size_fac\n rectPaper = [1.0, 1.0, paper_width * paperSizeFac, paper_height * paperSizeFac]\n\n rectRho = [0.15, 0.15 + 2.3 / (ydecs + 3), 0.8, ydecs / (ydecs + 3) * 0.8]\n rectPhi = [0.15, 0.15, 0.8, 2 / (ydecs + 3) * 0.8]\n rects = {\n \"Screen\": rectScreen,\n \"Paper\": rectPaper,\n \"Rho\": rectRho,\n \"Phi\": rectPhi,\n }\n return rects", "def __init__(self, nx, ny, nxsize=5.4, nysize=6.2):\n self.nx = nx\n self.ny = ny\n self.n = 1\n plt.figure(figsize=(nysize*ny, nxsize*nx))\n plt.subplot(nx, ny, self.n)", "def plot_insertsize():", "def autosize(self):\n if not self._tightLayout:\n self.fig.subplots_adjust(left=0.125, bottom=0.125, top=0.9,\n right=0.875,\n wspace=0.35, hspace=0.2)\n else:\n self.fig.tight_layout(rect=[0, 0.03, 1, 0.95])\n\n self.draw()", "def Pane_Resized( self, new_sizes ):\r\n if(new_sizes[0] > 200 ):\r\n cb.xtotal = new_sizes[0]-100\r\n self.canvas_one.config(width = new_sizes[0])\r\n self.canvas_scale.config(width = new_sizes[0])\r\n else:\r\n cb.xtotal = 200-100\r\n self.canvas_one.config(width = 200)\r\n self.canvas_scale.config(width = 200)\r\n if (len(new_sizes) > 1 ):\r\n self.canvas_two.config(width=new_sizes[1])\r\n self.system.Draw()", "def plot_settings(clear = True, grid = True):\n if clear:\n plt.clf() # Clears any previous figures\n\n # Setting figure size\n figure = plt.gcf()\n figure.set_size_inches(18, 10)\n\n # Setting size of plot elements\n plt.rc('axes', labelsize = 22, titlesize = 24) \n plt.rc('xtick', labelsize = 18) \n plt.rc('ytick', labelsize = 18) \n plt.rc('legend', fontsize = 20)\n plt.rc('axes', axisbelow = True) # Ensures that the grid is behind any graph elements\n if grid:\n plt.grid() # Adds a grid to the plot", "def format_size(self, WIDTH, HEIGHT):\n self._fig.update_layout(\n width=WIDTH,\n height=HEIGHT,\n )", "def _set_size(self):\n if self.width_key is not None:\n width = config.get(self.width_key)\n height = config.get(self.height_key)\n self.window.resize(width, height)", "def set_widget_size(self, widget_size):\n v = self.viewport\n v.projection.widget_rect = Rect(\n mins=[0, 0],\n maxes=[widget_size[0], widget_size[1]])\n v.view.widget_size = v.projection.widget_rect.sizes", "def plot_set_R_figsize(**magic_R_args):\n if load_ext_rpy2_ipython():\n extend_magic_R_with_defaults.default_line = format_magic_R_args(**magic_R_args)\n return plot_get_R_figsize()", "def setwinsize(self, rows, cols):", "def frame():\n fig = plt.figure(figsize = (6, 3))\n\n plt.subplots_adjust(left=.15, bottom=.2, right=.95, top=.9)\n ax = fig.add_subplot(111)\n \n ax.tick_params(axis=\"x\", labelsize=12)\n ax.tick_params(axis=\"y\", labelsize=12)\n\n return 
fig, ax", "def set_size(self, size):\n \n self.width = size[0]\n self.height = size[1]", "def set_size(width, fraction=1):\n # Width of figure\n fig_width_pt = width * fraction\n # Convert from pt to inches\n inches_per_pt = 1 / 72.27\n # Golden ratio to set aesthetic figure height\n golden_ratio = (5**.5 - 1) / 2\n # Figure width in inches\n fig_width_in = fig_width_pt * inches_per_pt\n # Figure height in inches\n fig_height_in = fig_width_in * golden_ratio\n fig_dim = (fig_width_in, fig_height_in)\n return fig_dim", "def SetWindowSize(self, size):\n self.WINDOW_SIZE = size", "def set_canvas_size(self, width, height):\n self.canvas.config(width = int(width), height = int(height))", "def set_pointsize(self, pointsize):\n\tself.m_pointsize = pointsize", "def size(self, size):\n self.width = size\n self.height = size", "def size(self, size):\n self.width = size\n self.height = size", "def set_size(width, fraction=1):\n # Width of figure\n fig_width_pt = width * fraction\n\n # Convert from pt to inches\n inches_per_pt = 1 / 72.27\n\n # Golden ratio to set aesthetic figure height\n #golden_ratio = (5**.5 - 1) / 2\n andrews_ratio = 2.0/4.0\n\n # Figure width in inches\n fig_width_in = fig_width_pt * inches_per_pt\n # Figure height in inches\n fig_height_in = fig_width_in * andrews_ratio\n\n fig_dim = (fig_width_in, fig_height_in)\n\n return fig_dim", "def size(self):\n return self._ax.size", "def resize(self, size):\n self.widget.resize(*size)", "def generate_figure(figsize=(2, 2), xlim=[0, 1], ylim=[0, 1]):\n plt.figure(figsize=figsize)\n plt.grid()\n plt.xlim(xlim)\n plt.ylim(ylim)\n plt.xlabel('$\\mathrm{Re}$')\n plt.ylabel('$\\mathrm{Im}$')", "def set_size(width, fraction=1):\n # Width of figure\n fig_width_pt = width * fraction\n\n # Convert from pt to inches\n inches_per_pt = 1 / 72.27\n\n # Golden ratio to set aesthetic figure height\n golden_ratio = (5**.5 - 1) / 2\n\n # Figure width in inches\n fig_width_in = fig_width_pt * inches_per_pt\n # Figure height in inches\n fig_height_in = fig_width_in * golden_ratio\n\n fig_dim = (fig_width_in, fig_height_in)\n\n return fig_dim", "def set_canvas_size(self, width_npix, height_npix):\n\n self.variables.canvas_width = width_npix\n self.variables.canvas_height = height_npix\n if self.variables.canvas_image_object is not None:\n self.variables.canvas_image_object.canvas_nx = width_npix\n self.variables.canvas_image_object.canvas_ny = height_npix\n self.config(width=width_npix, height=height_npix)", "def set_size(width_pt=472.03123, fraction=1, aspect_ratio=0.6180339887498949, subplots=(1, 1)):\n # Width of figure (in pts)\n fig_width_pt = width_pt * fraction\n # Convert from pt to inches\n inches_per_pt = 1 / 72.27\n\n # Figure width in inches\n fig_width_in = fig_width_pt * inches_per_pt\n # Figure height in inches\n fig_height_in = fig_width_in * aspect_ratio * (subplots[0] / subplots[1])\n\n return (fig_width_in, fig_height_in)" ]
[ "0.78902227", "0.6751782", "0.6596949", "0.65897006", "0.65781975", "0.6517747", "0.6500645", "0.64662874", "0.6454464", "0.64407265", "0.63999575", "0.63668054", "0.6301865", "0.6289062", "0.6275893", "0.62544805", "0.6221664", "0.6207713", "0.61245376", "0.6113087", "0.6098473", "0.60825", "0.60825", "0.6071935", "0.6070559", "0.6070501", "0.60702604", "0.60660905", "0.60530317", "0.6039518" ]
0.7844332
1
Plot some data images of FashionMnistDataset in one line with labels
def show_fashion_mnist(images, labels): use_svg_display() # the '_' here means we don't need that parameter # here init a figure to plot images _, figs = plt.subplots(1, len(images), figsize=(12, 12)) for f, img, lbl in zip(figs, images, labels): # show the image f.imshow(img.view(28, 28).numpy()) # set title f.set_title(lbl) # hide the x and y axis f.axes.get_xaxis().set_visible(False) f.axes.get_yaxis().set_visible(False) # show the plot figure plt.show()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_labels(lbl: scipy.ndarray, lbl_count: int) -> None:\n color_map = scipy.rand(lbl_count, 3)\n color_map = matplotlib.colors.ListedColormap(color_map)\n plt.imshow(lbl, cmap=color_map)\n plt.show()", "def visualize(**images):\n n_images = len(images)\n plt.figure(figsize=(20,8))\n for idx, (name, image) in enumerate(images.items()):\n plt.subplot(1, n_images, idx + 1)\n plt.xticks([]); \n plt.yticks([])\n # get title from the parameter names\n plt.title(name.replace('_',' ').title(), fontsize=20)\n plt.imshow(image)\n plt.savefig('sample_gt_pred_2_max.jpeg')\n plt.show()", "def plot_image(img, label=\"\"): \n if img.shape[0] == 3:\n img = img.transpose(1,2,0)\n fig,ax = plt.subplots(1)\n sns.set_style('white')\n ax.imshow(np.asarray(img))\n if label!=\"\":\n plt.title(number_label[label])\n return fig,ax", "def plot_images(images, labels, nrows, ncols, cls_true=None, cls_pred=None, grey=False):\n fig, axes = plt.subplots(nrows, ncols, figsize=(16, 2*nrows))\n\n for i, ax in enumerate(axes.flat): \n if grey:\n ax.imshow(images[i,:,:,0], cmap='binary')\n else:\n ax.imshow(images[i])\n\n ax.set_xticks([]); ax.set_yticks([])\n if labels:\n ax.set_title(labels[i])", "def show_train_images(train_data, train_labels):\n plt.figure(1, figsize=(8, 8))\n n = 0\n\n for i in range(16):\n n += 1\n # each time random images are loaded\n # r = np.random.randint(0, train_data.shape[0], 1)\n plt.subplot(4, 4, n)\n plt.subplots_adjust(hspace=0.5, wspace=0.5)\n plt.imshow(train_data[i] / 255.)\n plt.title('{}'.format(train_labels[i]))\n plt.xticks([]), plt.yticks([])\n plt.show()", "def show(image,label,pred):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n s=\"True Label : \"+str(label)+\" Predicted label : \"+str(pred)\n pyplot.xlabel(s,fontname=\"Arial\", fontsize=20 )\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()", "def display_image_with_label(index):\n\n image = df_train.ix[index, :].values\n label = labels.values[index]\n\n plt.axis('off')\n plt.imshow(image.reshape(image_width, image_height), cmap=cm.binary)\n print(\"It is a {}\".format(label))", "def plot_labelled_images(x, y, filename, y_predict=None):\n\n plt.clf()\n\n nplot = 10\n nrows = 2\n ncols = 5\n\n # randomly choose which images from the dataset to plot \n random_indices = np.random.choice(x.shape[0], size=nplot, replace=False)\n\n figure = plt.gcf()\n\n for i, index in enumerate(random_indices):\n ax = figure.add_subplot(nrows, ncols, i + 1, xticks=[], yticks=[])\n\n # plot image\n ax.imshow(np.squeeze(x[index]))\n\n # add label as title of image\n label_index = np.argmax(y[index])\n label = label_names[label_index]\n\n # if predicted labels have been supplied in addition to true labels, show both\n if y_predict is not None:\n predicted_label_index = np.argmax(y_predict[index])\n predicted_label = label_names[predicted_label_index]\n title = \"true={}\\n(predicted={})\".format(label, predicted_label)\n\n # else only show true labels\n else:\n title = \"true={}\".format(label)\n\n ax.set_title(title)\n\n size = figure.get_size_inches()\n figure.set_size_inches(size[0]*2, size[1]*2)\n\n plt.savefig(filename, bbox_inches='tight')", "def plot_predictions(images, filename):\n imagex = format_image(images, 4)\n mosaic = create_mosaic(imagex, 2, 2)\n plt.figure(figsize=(12, 12))\n plt.imshow(mosaic, cmap='gray')\n plt.axis('off')\n 
plt.savefig(filename + '.png', bbox_inches='tight')", "def train_nn(train_nn_results, label, title, yaxis):\n plt.figure(figsize=(12,5))\n for i in range(len(label)):\n plt.plot(train_nn_results[i], label=label[i], alpha=0.75)\n plt.title(title)\n plt.xlabel('epoch')\n plt.ylabel(yaxis)\n plt.legend(bbox_to_anchor=(1.05, 1.0), loc='upper left')\n plt.tight_layout()\n plt.show()", "def showData(data, labels, truelabels=None):\n\n n = data.shape[0]\n colors = np.dot(labels,np.arange(2)).reshape([-1]) # for color-coding labels\n\n plt.figure()\n plt.scatter(data[:,0],data[:,1], c=colors, s=40)\n\n\n # identify incorrectly labeled examples with an x colored with the correct class\n if truelabels is not None:\n incorrect_idx = []\n truecolors = np.dot(truelabels,np.arange(2)).reshape([-1])\n for i in range(n):\n if not isgoodprediction(labels[i,:], truelabels[i,:]):\n incorrect_idx.append(i)\n plt.scatter( data[incorrect_idx,0], data[incorrect_idx,1],s=50, c='k', marker='x',lw=5 ,label='misclassified')\n\n plt.legend()\n plt.axes().set_aspect('equal', 'datalim')\n plt.show()", "def mnist(path):\n with open(path, 'r') as f:\n for line in f:\n data = line.strip().split(',')\n\n # Label is a vector with one element per class\n label = [0.0] * 10\n label[int(data[0])] = 1.0 \n\n # The data are images of 28x28 pixels\n image_array = np.asfarray(data[1:]).reshape((28, 28))\n # Normalize the pictures \n image_array = image_array / 255.0\n\n #plt.imshow(image_array, cmap='Greys', interpolation='None')\n yield (image_array, label)", "def visualize(**images):\n n = len(images)\n plt.figure(figsize=(16, 5))\n for i, (name, image) in enumerate(images.items()):\n plt.subplot(1, n, i + 1)\n plt.xticks([])\n plt.yticks([])\n plt.title(' '.join(name.split('_')).title())\n plt.imshow(image)\n plt.show()", "def visualize(**images):\n n = len(images)\n plt.figure(figsize=(16, 5))\n for i, (name, image) in enumerate(images.items()):\n plt.subplot(1, n, i + 1)\n plt.xticks([])\n plt.yticks([])\n plt.title(' '.join(name.split('_')).title())\n plt.imshow(image)\n plt.show()", "def visualize(**images):\n n = len(images)\n plt.figure(figsize=(16, 5))\n for i, (name, image) in enumerate(images.items()):\n plt.subplot(1, n, i + 1)\n plt.xticks([])\n plt.yticks([])\n plt.title(' '.join(name.split('_')).title())\n plt.imshow(image)\n plt.show()", "def show(image, label, weights, prediction, ax):\n global img_objects\n if len(img_objects)==0:\n for i in range(10):\n _img = ax[0, i].imshow(weights[i].reshape(28,28), cmap='gray')\n img_objects.append(_img)\n _img = ax[1, 5].imshow(image.reshape(28,28), cmap='gray')\n img_objects.append(_img)\n else:\n for i in range(10):\n img_objects[i].set_data(weights[i].reshape(28,28))\n img_objects[i].set_clim(vmin=0, vmax=np.max(weights[i]))\n img_objects[10].set_data(image.reshape(28,28))\n ax[0,5].set_title('truth: %d, predict: %d'%(np.argmax(label), prediction))", "def visualize_MTL(**images):\r\n n = len(images)\r\n plt.figure(figsize=(16, 5))\r\n for i, (name, image) in enumerate(images.items()):\r\n if image==None:\r\n continue\r\n else:\r\n plt.subplot(1, n, i + 1)\r\n plt.xticks([])\r\n plt.yticks([])\r\n plt.title(' '.join(name.split('_')).title())\r\n plt.imshow(image)\r\n plt.show()", "def ploter(self):\n if len(self.dataset[self.first_title]) != 2:\n print('plot is only avilable for two features')\n return\n x_axis = []\n y_axis = []\n for title in self.dataset:\n x_axis.append(self.dataset[title][0])\n y_axis.append(self.dataset[title][1])\n plt.plot(x_axis, y_axis, 'o')\n 
plt.show()", "def show_feat(feat_map):\n for i in range(feat_map.shape[0]):\n plt.imshow(feat_map[i])\n plt.show()", "def show_image(self, image_set='train', index=None, interactive_mode=True):\n if interactive_mode:\n plt.ion()\n else:\n plt.ioff()\n\n if image_set == 'train':\n target = self.train_dataset\n else:\n target = self.test_dataset\n\n if index is None:\n index = randint(0, len(target['data']))\n\n plt.figure(num=self.LABELS[target['labels'][index]])\n plt.imshow(target['data'][index])\n plt.show()", "def plot_dataset(features, labels, nb_classes: int) -> None:\n sns.scatterplot(x=features[:, 0], y=features[:, 1], hue=labels, markers=True)\n plt.title(f'Data from {nb_classes} classes')\n save_plot('mock_dataset')", "def plot_data(self):\n # plot every log image\n for log_img in self.log_img_map.itervalues():\n log_img.plot()", "def plot_image_transformation(metadata, data_dir, index=1):\n transform = transforms.Compose([\n transforms.Resize((64, 64)),\n transforms.RandomCrop(54),\n transforms.ToTensor(),\n ])\n\n image_path = '{}/{}.png'.format(data_dir, index)\n\n image = SVHNImage(metadata[index-1]['metadata'], image_path,\n crop_percent=0.3, transform=transform)\n orig_image = image.image()\n bounded_image = image.bounded_image()\n cropped_image = image.cropped_image()\n transformed_image = image.transformed_image()\n\n tensor_to_image(orig_image, save='./figures/original_image.png')\n tensor_to_image(bounded_image, save='./figures/bounded_image.png')\n tensor_to_image(cropped_image, save='./figures/cropped_image.png')\n tensor_to_image(transformed_image, save='./figures/transformed_image.png')", "def plot_image(ax, example, training=True):\n ax.grid(False) # hide grid lines\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n\n if training:\n prefix = example['metadata'][0]['image_prefix'] / \"training/image_2/\"\n else:\n prefix = example['metadata'][0]['image_prefix'] / \"testing/image_2/\"\n\n image_file = prefix / Path(f\"{example['metadata'][0]['image_idx']:06d}\" + \".png\")\n img = plt.imread(image_file)\n ax.imshow(img)", "def show_one(img):\n dpi = 40\n margin = 0.05\n nda = sitk.GetArrayFromImage(img)\n spacing = img.GetSpacing()\n extent = (0, nda.shape[1] * spacing[1], nda.shape[0] * spacing[0], 0)\n figsize = (5, 5)\n fig = plt.figure(figsize=figsize, dpi=dpi)\n ax = fig.add_axes([margin, margin, 1 - 2 * margin, 1 - 2 * margin])\n\n plt.set_cmap(\"gray\")\n ax.imshow(nda, extent=extent, interpolation=None)", "def visualize(**images):\n n = len(images)\n plt.figure(figsize=(16, 5))\n for i, (name, image) in enumerate(images.items()):\n plt.subplot(1, n, i + 1)\n plt.xticks([])\n plt.yticks([])\n plt.title(' '.join(name.split('_')).title())\n plt.imshow(image)\n plt.show()\n # plt.savefig('./drive/My Drive/Colab Notebooks/TACK/Large/result' + ' '.join(name.split('_')).title() + '.png')", "def display_sample_images(self):\n if self.train_dataset is None:\n self.init_datasets()\n\n images, labels = next(self.train_dataset)\n plt.figure(figsize=(5,5))\n for n in range(min(25, images.shape[0])):\n ax = plt.subplot(5,5,n+1)\n plt.imshow(images[n])\n if len(labels.shape) == 1:\n plt.title(self.class_names[int(labels[n])].title())\n else:\n m = np.argmax(labels[n])\n plt.title(self.class_names[int(labels[n, m])].title())\n plt.axis('off')\n\n plt.tight_layout()\n plt.show()", "def plot_preds(image, preds):\n plt.imshow(image)\n plt.axis('off')\n\n plt.figure()\n order = list(reversed(range(len(preds))))\n bar_preds = [pr[2] for pr in preds]\n labels 
= (pr[1] for pr in preds)\n plt.barh(order, bar_preds, alpha=0.5)\n plt.yticks(order, labels)\n plt.xlabel('Probability')\n plt.xlim(0,1.01)\n plt.tight_layout()\n plt.show()\n return labels", "def view_images(dataset, size):\n images, labels = dataset\n assert images.shape[0] == labels.shape[0]\n\n num_images = images.shape[0]\n num_cols = 3\n num_rows = np.ceil(num_images / num_cols).astype(\"int\")\n plt.figure(figsize=size)\n for i in range(num_images):\n image = images[i]\n label = labels[i]\n ax = plt.subplot(num_rows, num_cols, i + 1)\n plt.imshow(np.array(image, dtype=\"float\"))\n plt.title(\"Number: \" + str(label))\n plt.axis(\"off\")", "def plot_latent_images(self, n):\n\n norm = tfp.distributions.Normal(0, 1)\n grid_x = norm.quantile(np.linspace(0.05, 0.95, n))\n grid_y = norm.quantile(np.linspace(0.05, 0.95, n))\n image_width = self.data.digit_size*n\n image_height = image_width\n image = np.zeros((image_height, image_width))\n\n for i, yi in enumerate(grid_x):\n for j, xi in enumerate(grid_y):\n z = np.array([[xi, yi]])\n x_decoded = self.model.sample(z)\n digit = tf.reshape(x_decoded[0], (self.data.digit_size, self.data.digit_size))\n image[i * self.data.digit_size: (i + 1) * self.data.digit_size,\n j * self.data.digit_size: (j + 1) * self.data.digit_size] = digit.numpy()\n\n plt.figure(figsize=(10, 10))\n plt.imshow(image, cmap='Greys_r')\n plt.axis('Off')\n plt.show()" ]
[ "0.6728893", "0.66888267", "0.6639547", "0.66114897", "0.6588676", "0.65629697", "0.6526726", "0.65079933", "0.63961387", "0.6348753", "0.6331709", "0.6322642", "0.6306844", "0.6306844", "0.6306844", "0.63059264", "0.6299445", "0.6282435", "0.62814975", "0.6277304", "0.62699014", "0.62673974", "0.62658733", "0.6231477", "0.62238497", "0.62105006", "0.61896306", "0.6160953", "0.6147881", "0.613655" ]
0.73599213
0
plot the trace of 2d function's figure and results
def show_trace_2d(f, results): plt.close() # draw input points plt.plot(*zip(*results), '-o', color='#ff7f0e') # get the field of figure x1, x2 = np.meshgrid(np.arange(-5.5, 1.0, 0.1), np.arange(-3.0, 1.0, 0.1)) # draw the contour of function using x1,x2 as step plt.contour(x1, x2, f(x1, x2), colors='#1f77b4') plt.xlabel('x1') plt.ylabel('x2') plt.show()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show_trace_2d(f, results): #@save\n set_figsize()\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n x1, x2 = torch.meshgrid(torch.arange(-5.5, 1.0, 0.1),torch.arange(-3.0, 1.0, 0.1))\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')", "def show_trace_2d(f, results): #@save\n d2l.set_figsize()\n d2l.plt.plot(*zip(*results), '-o', color='#ff7f0e')\n x1, x2 = d2l.meshgrid(d2l.arange(-5.5, 1.0, 0.1),\n d2l.arange(-3.0, 1.0, 0.1))\n d2l.plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n d2l.plt.xlabel('x1')\n d2l.plt.ylabel('x2')", "def plot():\n pass", "def make_plot(x,y):", "def plot(self):\n\t\tself.plotOfXray().plot()", "def printfunc(self):\n zero1=self.Newton(True)\n print \"Using initial porition %0.2f ,%0.2f\" %(self.x_init,self.y_0)\n print \"extremum calculated witn Newton-Rapson: %0.2f ,%0.2f.\"%(zero1[0],zero1[1])\n zero2=self.Newton(False)\n print \"extremum calculated witn Secant: %0.2f ,%0.2f.\" %(zero2[0],zero2[1])\n xlist=np.arange(self.x_0-10,self.x_0+10,0.01)\n ylist=np.arange(self.y_0-10,self.y_0+10,0.01)\n X,Y=np.meshgrid(xlist,ylist)\n Z=self.sfunc(X,Y)\n fig = plt.figure()\n ax = fig.add_subplot(111, projection='3d')\n \n ax.plot(xlist, ylist, self.sfunc(xlist,ylist), 'g-',label='function $e^{(-(x-%0.2f)^2-(y-%0.2f)^2)}$' %(self.x_0,self.y_0))\n ax.contour(X, Y, Z)# colors = 'k', linestyles = 'solid')\n ax.plot([zero1[0]], [zero1[0]], self.sfunc(zero1[0],zero1[1]),'bo',label='extrema using Newton-Rapson (%0.2f; %0.2f)'%(zero1[0],zero1[1]))\n ax.plot([zero2[0]], [zero2[0]], self.sfunc(zero2[0],zero2[1]),'ro',label='extrema using Seacent (%0.2f; %0.2f)'%(zero2[0],zero2[1]))\n ax.legend()\n plt.show()", "def plot_trace(self):\n az.plot_trace(self.ifd_)", "def plotTrace(trace):\n for t in trace:\n plt.plot(range(len(t)),t,alpha=0.5)\n plt.ylabel(\"Trace\")\n plt.xlabel(\"Step\")\n\n return", "def display1(*args):\n #----------*----------* # unpack\n twiss_func = args[0]\n lat_plot = args[3]\n #-------------------- beta x,y & dispersion x\n s = [twiss_func(i,'s') for i in range(twiss_func.nbpoints)]\n bx = [twiss_func(i,'bx') for i in range(twiss_func.nbpoints)]\n by = [twiss_func(i,'by') for i in range(twiss_func.nbpoints)]\n dx = [twiss_func(i,'dx') for i in range(twiss_func.nbpoints)]\n #-------------------- lattice viseo\n vzero = [0. 
for i in range(lat_plot.nbpoints)] # zero line\n vis_abszisse = [lat_plot(i,'s') for i in range(lat_plot.nbpoints)]\n vis_ordinate = [lat_plot(i,'viseo') for i in range(lat_plot.nbpoints)]\n #-------------------- figure frame\n width=14; height=7.6\n plt.figure(num=0,figsize=(width,height),facecolor='#eaecef',tight_layout=False)\n\n #-------------------- transverse X\n splot111=plt.subplot(111)\n splot111.set_title('beta functions')\n plt.plot(s,bx, label=r'$\\beta_x$ [m]', color='red', linestyle='-') # beta x\n plt.plot(s,by, label=r'$\\beta_y$ [m]', color='blue', linestyle='-') # beta y\n plt.plot(s,dx, label=r'$\\eta_x$ [m]' , color='green',linestyle='-') # dispersion x\n vscale=splot111.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n plt.plot(vis_abszisse,viseoz,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='black')\n plt.legend(loc='lower right',fontsize='x-small')", "def showPlot2():\n raise NotImplementedError", "def plot_figure(param1, param2):\n return 0", "def test_plot1(plot=1, version='scalar'):\n Lx = 10\n Ly = 10\n c = 1.0\n\n def I2(x, y):\n return exp(-(x-Lx/2.0)**2/2.0 -(y-Ly/2.0)**2/2.0)\n def f(x, y, t):\n return 0.0\n def bc(x, y, t):\n return 0.0\n\n I2 = StringFunction('exp(-(x-Lx/2.0)**2/2.0 -(y-Ly/2.0)**2/2.0)',\n independent_variables=('x', 'y'),\n Lx=Lx, Ly=Ly, globals=globals())\n f = StringFunction('0.0', independent_variables=('x', 'y', 't'),\n globals=globals())\n bc = StringFunction('0.0', independent_variables=('x', 'y', 't'),\n globals=globals())\n if plot:\n g = Gnuplot.Gnuplot(persist=1)\n g('set parametric')\n g('set data style lines')\n g('set hidden')\n g('set contour base')\n g('set zrange [-0.7:0.7]') # nice plot...\n \n def action(u, xv, yv, t):\n #print 'action, t=',t,'\\nu=',u, '\\nx=',x, '\\ny=', y\n if plot:\n data = Gnuplot.GridData(u, xv[:,0], yv[0,:], binary=0)\n g.splot(data)\n g('set title \"t=%g\"' % t)\n if plot == 2:\n g.hardcopy(filename='tmp_%020f.ps' % t, enhanced=1, mode='eps',\n color=0, fontname='Times-Roman', fontsize=14)\n time.sleep(1)\n time.sleep(0.2) # pause between frames\n\n implementation = {'ic': version, 'inner': version, 'bc': version}\n nx = 40; ny = 40; tstop = 20 # tstop = 700\n print 'test_plot1:', f, bc, I2\n dt, t_ic, t_inner, t_bc = \\\n solver(I2, f, c, bc, Lx, Ly, nx, ny, 0, tstop,\n user_action=action, implementation=implementation)\n print 'time ic: %s, time scheme: %s, time bc: %s' % (t_ic, t_inner, t_bc)\n time.sleep(3)", "def display3(*args):\n #-------------------- unpack\n twiss_fun = args[0]\n cos_like = args[1]\n sin_like = args[2]\n lat_plot = args[3]\n ape_plot = args[4]\n #-------------------- sigma functions\n # zero = [0. 
for i in range(sigma_fun.nbpoints)] # zero line\n z = [twiss_fun(i,'s') for i in range(twiss_fun.nbpoints)] # Abszisse\n sgx = [twiss_fun(i,'sigx')*1.e3 for i in range(twiss_fun.nbpoints)] # envelope (sigma-x)\n sgy = [twiss_fun(i,'sigy')*1.e3 for i in range(twiss_fun.nbpoints)] # envelope (sigma-y)\n #-------------------- trajectories\n z1= [cos_like(i,'s') for i in range(cos_like.nbpoints)]\n cx= [cos_like(i,'cx')*1.e3 for i in range(cos_like.nbpoints)]\n # cxp= [cos_like(i,'cxp')*1.e3 for i in range(cos_like.nbpoints)]\n cy= [cos_like(i,'cy')*1.e3 for i in range(cos_like.nbpoints)]\n # cyp= [cos_like(i,'cyp')*1.e3 for i in range(cos_like.nbpoints)]\n cz= [cos_like(i,'cz') for i in range(cos_like.nbpoints)]\n cdp= [cos_like(i,'cdp') for i in range(cos_like.nbpoints)]\n\n z2= [sin_like(i,'s') for i in range(sin_like.nbpoints)]\n sx= [sin_like(i,'sx')*1.e3 for i in range(sin_like.nbpoints)]\n # sxp= [sin_like(i,'sxp')*1.e3 for i in range(sin_like.nbpoints)]\n sy= [sin_like(i,'sy')*1.e3 for i in range(sin_like.nbpoints)]\n # syp= [sin_like(i,'syp')*1.e3 for i in range(sin_like.nbpoints)]\n sz= [sin_like(i,'sz') for i in range(sin_like.nbpoints)]\n sdp= [sin_like(i,'sdp') for i in range(sin_like.nbpoints)]\n #-------------------- lattice viseo\n vzero = [0. for i in range(lat_plot.nbpoints)] # zero line\n vis_abszisse = [lat_plot(i,'s') for i in range(lat_plot.nbpoints)]\n vis_ordinate = [lat_plot(i,'viseo') for i in range(lat_plot.nbpoints)]\n ape_abszisse = [ape_plot(i,'s') for i in range(ape_plot.nbpoints)]\n ape_ordinate = [ape_plot(i,'aperture')*1.e3 for i in range(ape_plot.nbpoints)]\n #-------------------- figure frame\n width=14; height=7.6\n # fighdr = 'lattice version = {}, input file = {}'.format(PARAMS['lattice_version'],PARAMS['input_file'])\n fig = plt.figure(num=1,figsize=(width,height),facecolor='#eaecef',tight_layout=False)\n\n #-------------------- transverse X tracks\n splot311=plt.subplot(311)\n # splot311=plt.subplot(10,1,(1,3))\n splot311.set_title('transverse x')\n # mapping box\n splot311.text(0.01, 1.1,UTIL.FLAGS.get('mapping'),transform=splot311.transAxes,fontsize=8,bbox=dict(boxstyle='round',facecolor='wheat',alpha=0.5),verticalalignment='top')\n if UTIL.FLAGS['envelope']:\n plt.plot(z,sgx ,label=r'$\\sigma$ [mm]',color='green')\n plt.plot(z1,cx, label=\"C [mm]\",color='blue',linestyle='-')\n # plt.plot(z1,cxp,label=\"C' [mr]\",color='blue',linestyle=':')\n plt.plot(z2,sx, label=\"S [mm]\",color='red' ,linestyle='-')\n # plt.plot(z2,sxp,label=\"S' [mr]\",color='red' ,linestyle=':')\n vscale=splot311.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n plt.plot(vis_abszisse,viseoz,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='green',linestyle='--')\n # apertures\n if UTIL.FLAGS['useaper']:\n plt.plot(ape_abszisse,ape_ordinate,linestyle='-.')\n N = UTIL.PARAMS['nbsigma']\n sgx = [i*N for i in sgx]\n #label = F'{N:1}$\\sigma$ [mm]'\n label = '{:1}$\\sigma$ [mm]'.format(N)\n plt.plot(z,sgx ,label=label,color='green',linestyle=':')\n # zero line\n splot311.plot(vis_abszisse,vzero,color='green',linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n\n #-------------------- transverse Y tracks\n splot312=plt.subplot(312)\n # splot312=plt.subplot(10,1,(4,6))\n splot312.set_title('transverse y')\n if UTIL.FLAGS['envelope']:\n plt.plot(z,sgy ,label=r'$\\sigma$ [mm]',color='green')\n plt.plot(z1,cy, label=\"C [mm]\",color='blue',linestyle='-')\n # plt.plot(z1,cyp,label=\"C' [mr]\",color='blue',linestyle=':')\n plt.plot(z2,sy, label=\"S 
[mm]\",color='red' ,linestyle='-')\n vscale=splot312.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n plt.plot(vis_abszisse,viseoz,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='green',linestyle='--')\n # apertures\n if UTIL.FLAGS['useaper']:\n plt.plot(ape_abszisse,ape_ordinate,linestyle='-.')\n N = UTIL.PARAMS['nbsigma']\n sgy = [i*N for i in sgy]\n plt.plot(z,sgy ,label=label,color='green',linestyle=':')\n # zero line\n splot312.plot(vis_abszisse,vzero,color='green',linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n\n #-------------------- longitudinal tracks z, dP/P\n # ax_l = left abszisse\n ax_l=plt.subplot(313)\n # ax_l=plt.subplot(10,1,(7,9))\n ax_l.set_title('longitudinal')\n ax_l.set_ylabel(r\"z [mm]\")\n ax_l.tick_params(axis='y', colors='green')\n ax_l.yaxis.label.set_color('green')\n ax_l.plot(z1,cz,label='C',color='green')\n ax_l.plot(z2,sz,label='S',color='green',linestyle=':')\n plt.legend(loc='lower left',fontsize='x-small')\n # ax_r = right abszisse\n ax_r = ax_l.twinx()\n ax_r.set_ylabel(r'$\\Delta$p/p [%]')\n ax_r.tick_params(axis='y', colors='red')\n ax_r.yaxis.label.set_color('red')\n ax_r.plot(z1,cdp,label='C',color='red')\n ax_r.plot(z2,sdp,label='S',color='red',linestyle=':')\n ax_r.plot(vis_abszisse,vzero,color='red', linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n # lattice elements\n vscale=ax_l.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n ax_l.plot(vis_abszisse,viseoz,label='',color='black')\n ax_l.plot(vis_abszisse,vzero,color='green',linestyle='--')", "def plot_results_2d(p_1, p_2, d_1 = 'X', d_2 = 'Y'):\n plt.figure(figsize = (10, 10))\n ax = plt.axes() \n\n color=iter(cm.rainbow(np.linspace(0,1,p_1.shape[0]))) # (1)\n labels = ['Particle ' + str(pl+1) for pl in np.arange(0, p_1.shape[0], step = 1)]\n\n for p in np.arange(0, p_1.shape[0], step = 1): \n c = next(color) # (c)\n for t in np.arange(0, p_1.shape[1], step = 1): \n plt.plot(p_1[p, t], p_2[p, t], 'x', c = c, label = labels[p])\n legend_without_duplicate_labels(ax)\n ax.grid(b = 'True', which = 'major')\n ax.set_xlabel(d_1) \n ax.set_ylabel(d_2)\n ax.set_title('2D particle trajectories')", "def plot(self):\n pass", "def display4(*args):\n #-------------------- unpack\n twiss_func = args[0]\n cos_like = args[1]\n sin_like = args[2]\n lat_plot = args[3]\n #-------------------- beta x,y & dispersion x\n s = [twiss_func(i,'s') for i in range(twiss_func.nbpoints)] # Abszisse\n bx = [twiss_func(i,'bx') for i in range(twiss_func.nbpoints)] # beta x\n by = [twiss_func(i,'by') for i in range(twiss_func.nbpoints)] # beta y\n dx = [twiss_func(i,'dx') for i in range(twiss_func.nbpoints)] # dispersion x\n#-------------------- longitudinal trajectories\n z1= [cos_like(i,'s') for i in range(cos_like.nbpoints)]\n cz= [cos_like(i,'cz') for i in range(cos_like.nbpoints)]\n cdp= [cos_like(i,'cdp') for i in range(cos_like.nbpoints)]\n\n z2= [sin_like(i,'s') for i in range(sin_like.nbpoints)]\n sz= [sin_like(i,'sz') for i in range(sin_like.nbpoints)]\n sdp= [sin_like(i,'sdp') for i in range(sin_like.nbpoints)]\n #-------------------- lattice viseo\n vzero = [0. 
for i in range(lat_plot.nbpoints)] # zero line\n vis_abszisse = [lat_plot(i,'s') for i in range(lat_plot.nbpoints)]\n vis_ordinate = [lat_plot(i,'viseo') for i in range(lat_plot.nbpoints)]\n #-------------------- figure frame\n width=14; height=7.6\n # fighdr = 'lattice version = {}, input file = {}'.format(PARAMS['lattice_version'],PARAMS['input_file'])\n fig = plt.figure(num=1,figsize=(width,height),facecolor='#eaecef',tight_layout=False)\n\n #-------------------- beta functions\n splot211=plt.subplot(211)\n splot211.set_title('beta x,y')\n # mapping box\n splot211.text(0.01, 1.1, UTIL.FLAGS.get('mapping'),transform=splot211.transAxes,fontsize=8,bbox=dict(boxstyle='round',facecolor='wheat',alpha=0.5),verticalalignment='top')\n # function plots\n plt.plot(s,bx, label=r\"$\\beta$x [m]\", color='black', linestyle='-')\n plt.plot(s,by, label=r\"$\\beta$y [m]\", color='red', linestyle='-')\n plt.plot(s,dx, label=r'$\\eta_x$ [m]' , color='green', linestyle='-') # dispersion x\n vscale=splot211.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n plt.plot(vis_abszisse,viseoz,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='green',linestyle='--')\n # zero line\n splot211.plot(vis_abszisse,vzero,color='green',linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n\n #-------------------- longitudinal tracks z, dP/P\n # ax_l = left abszisse\n ax_l=plt.subplot(212)\n # ax_l=plt.subplot(10,1,(7,9))\n ax_l.set_title('synchrotron oscillation')\n ax_l.set_ylabel(r\"z [mm]\")\n ax_l.tick_params(axis='y', colors='green')\n ax_l.yaxis.label.set_color('green')\n ax_l.plot(z1,cz,label='C',color='green')\n ax_l.plot(z2,sz,label='S',color='green',linestyle=':')\n plt.legend(loc='lower left',fontsize='x-small')\n # ax_r = right abszisse\n ax_r = ax_l.twinx()\n ax_r.set_ylabel(r'$\\Delta$p/p [%]')\n ax_r.tick_params(axis='y', colors='red')\n ax_r.yaxis.label.set_color('red')\n ax_r.plot(z2,cdp,label='C',color='red')\n ax_r.plot(z2,sdp,label='S',color='red',linestyle=':')\n ax_r.plot(vis_abszisse,vzero,color='red', linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n # lattice elements\n vscale=ax_l.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n ax_l.plot(vis_abszisse,viseoz,label='',color='black')\n ax_l.plot(vis_abszisse,vzero,color='green',linestyle='--')", "def visualize(self, save: bool = False) -> None:\n import matplotlib.pyplot as plt\n import inspect\n\n plt.style.use('seaborn-whitegrid')\n plt.rcParams['figure.figsize'] = [10, 5]\n if not self.inverse_transformation:\n grid = np.linspace(0, 1, 10000)\n func = self.pdf(np.linspace(0, 1, 10000))\n try:\n plt.plot(grid, func)\n except:\n plt.plot(grid, np.repeat(func, 10000))\n plt.title('Intensity function')\n plt.xlabel('time')\n plt.ylabel('value')\n if save:\n try:\n plt.savefig('intensity_function_' + inspect.getsource(self.pdf).split('return')[\n 1].strip() + '.png')\n print('Saved as ' + 'intensity_function_' + inspect.getsource(self.pdf).split('return')[\n 1].strip() + '.png')\n except:\n warnings.warn(\"Saving intensity function failed!\")\n plt.show()\n plt.clf()\n\n t = self.generate()\n plt.step(t, list(range(0, len(t))))\n plt.title('Simulated trajectory')\n plt.xlabel('time')\n plt.ylabel('value')\n if save:\n try:\n plt.savefig(\n 'trajectory_' + inspect.getsource(self.pdf).split('return')[1].strip() + '.png')\n print('Saved as ' + 'trajectory_' + inspect.getsource(self.pdf).split('return')[\n 1].strip() + '.png')\n except:\n warnings.warn(\"Saving trajectory failed!\")\n 
plt.show()\n plt.clf()\n\n plt.plot(t, list(np.repeat(0, len(t))), '.')\n plt.title('Simulated points')\n plt.xlabel('time')\n if save:\n try:\n plt.savefig('points_' + inspect.getsource(self.pdf).split('return')[1].strip() + '.png')\n print('Saved as ' + 'points_' + inspect.getsource(self.pdf).split('return')[\n 1].strip() + '.png')\n except:\n warnings.warn(\"Saving points failed!\")\n plt.show()\n plt.clf()", "def plotalltraces(td):\n \n plotmsubtrace(td, 211)\n dftf.plotflypic(td, 212)", "def visualize(self, save=False):\n import matplotlib.pyplot as plt\n import inspect\n\n plt.style.use('seaborn-whitegrid')\n plt.rcParams['figure.figsize'] = [10, 5]\n\n grid = np.linspace(self.lower, self.upper, 10000)\n func = self.intensity_function(np.linspace(self.lower, self.upper, 10000))\n try:\n plt.plot(grid, func)\n except:\n plt.plot(grid, np.repeat(func, 10000))\n plt.title('Intensity function')\n plt.xlabel('time')\n plt.ylabel('value')\n if save:\n try:\n plt.savefig('intensity_function_' + inspect.getsource(self.intensity_function).split('return')[\n 1].strip() + '.png')\n print('Saved as ' + 'intensity_function_' + inspect.getsource(self.intensity_function).split('return')[\n 1].strip() + '.png')\n except:\n warnings.warn(\"Saving intensity function failed!\")\n plt.show()\n plt.clf()\n\n t = self.generate()\n plt.step(t, list(range(0, len(t))))\n plt.title('Simulated trajectory')\n plt.xlabel('time')\n plt.ylabel('value')\n if save:\n try:\n plt.savefig(\n 'trajectory_' + inspect.getsource(self.intensity_function).split('return')[1].strip() + '.png')\n print('Saved as ' + 'trajectory_' + inspect.getsource(self.intensity_function).split('return')[\n 1].strip() + '.png')\n except:\n warnings.warn(\"Saving trajectory failed!\")\n plt.show()\n plt.clf()\n\n plt.plot(t, list(np.repeat(0, len(t))), '.')\n plt.title('Simulated points')\n plt.xlabel('time')\n if save:\n try:\n plt.savefig('points_' + inspect.getsource(self.intensity_function).split('return')[1].strip() + '.png')\n print('Saved as ' + 'points_' + inspect.getsource(self.intensity_function).split('return')[\n 1].strip() + '.png')\n except:\n warnings.warn(\"Saving points failed!\")\n plt.show()\n plt.clf()", "def create_fig_2d(self, data_array_2d, output_fn='', xlabel='', ylabel='', title=''):", "def plot_2D(df):\n import matplotlib.pyplot as plt\n fig = plt.figure(figsize=(8,6))\n fig.clf()\n #Get the current Axes instance on the current figure matching the given \n #keyword args, or create one.\n ax = fig.gca()\n df.plot(kind = 'scatter', x = 'x', y = 'y', ax = ax, alpha = 0.5)\n ax.set_xlabel('X')\n ax.set_ylabel('Y')\n ax.set_title('X vs. 
Y')\n return 'Done'", "def plot(self):\n\t\tself.plotOfTF().plot()", "def _plot_trace(self, trace, axis, idx):\n xy = [trace[0].value]\n for prevrecord, record in zip(trace[:-1], trace[1:]):\n if np.abs(prevrecord.value[0]-record.value[0]) > self.halfbox[0] or \\\n np.abs(prevrecord.value[1]-record.value[1]) > self.halfbox[1] :\n xy = np.asarray(xy)\n axis.plot(xy[:,0], xy[:,1], \"-\", color=colors.color(idx))\n xy = [record.value]\n else:\n xy.append(record.value)\n xy = np.asarray(xy)\n axis.plot(xy[:,0], xy[:,1], \"-\", color=colors.color(idx))\n axis.set_xlim(0, self.box[0])\n axis.set_ylim(0, self.box[1])\n axis.set_aspect('equal')", "def plot_f(self, *args, **kwargs):\r\n kwargs['plot_raw'] = True\r\n self.plot(*args, **kwargs)", "def display0(*args):\n #----------*----------* # unpack\n twiss_func = args[0]\n cos_like = args[1]\n sin_like = args[2]\n lat_plot = args[3]\n #-------------------- Bahnkoordinate (z)\n z = [twiss_func(i,'s') for i in range(twiss_func.nbpoints)]\n sgx = [twiss_func(i,'sigx') for i in range(twiss_func.nbpoints)]\n sgy = [twiss_func(i,'sigy') for i in range(twiss_func.nbpoints)]\n # zero = [0. for i in range(sigma_fun.nbpoints)]\n #-------------------- trajectories (tz)\n tz= [cos_like(i,'s') for i in range(cos_like.nbpoints)]\n cx= [cos_like(i,'cx') for i in range(cos_like.nbpoints)]\n # cxp= [cos_like(i,'cxp') for i in range(cos_like.nbpoints)]\n cy= [cos_like(i,'cy') for i in range(cos_like.nbpoints)]\n # cyp= [cos_like(i,'cyp') for i in range(cos_like.nbpoints)]\n # cz= [cos_like(i,'cz') for i in range(cos_like.nbpoints)]\n # cdp= [cos_like(i,'cdp') for i in range(cos_like.nbpoints)]\n\n sx= [sin_like(i,'sx') for i in range(sin_like.nbpoints)]\n # sxp= [sin_like(i,'sxp') for i in range(sin_like.nbpoints)]\n sy= [sin_like(i,'sy') for i in range(sin_like.nbpoints)]\n # syp= [sin_like(i,'syp') for i in range(sin_like.nbpoints)]\n # sz= [sin_like(i,'sz') for i in range(sin_like.nbpoints)]\n # sdp= [sin_like(i,'sdp') for i in range(sin_like.nbpoints)]\n #-------------------- lattice viseo\n stop_viseox = 5 # stop viseo plot after so many [m]\n stop_viseoy = 5 # stop viseo plot after so many [m]\n vzero = [0. 
for i in range(lat_plot.nbpoints)] # zero line\n vis_abszisse = [lat_plot(i,'s') for i in range(lat_plot.nbpoints)]\n vis_ordinate = [lat_plot(i,'viseo') for i in range(lat_plot.nbpoints)]\n #-------------------- figure frame\n width=14; height=7.6\n plt.figure(num=0,figsize=(width,height),facecolor='#eaecef',tight_layout=False)\n\n #-------------------- transverse X\n splot211=plt.subplot(211)\n splot211.set_title('transverse x')\n plt.plot(z,sgx ,label=r'$\\sigma$ [m]',color='green')\n plt.plot(tz,cx ,label='Cx[m]', color='blue',linestyle='-')\n # plt.plot(tz,cxp,label=\"Cx'[m]\",color='blue',linestyle=':')\n plt.plot(tz,sx, label='Sx[m]', color='red' ,linestyle='-')\n # plt.plot(tz,sxp,label=\"Sx'[m]\",color='red' ,linestyle=':')\n # vscale=plt.axis()[3]*0.1\n # viseox = [x*vscale for x in vis_ordinate]\n # for i,s in enumerate(vis_abszisse):\n # if s > stop_viseox:\n # viseox[i] = 0.\n # plt.plot(vis_abszisse,viseox,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='black')\n plt.legend(loc='lower right',fontsize='x-small')\n\n #-------------------- transverse Y\n splot212=plt.subplot(212)\n splot212.set_title('transverse y')\n plt.plot(z,sgy ,label=r'$\\sigma$ [m]',color='green')\n plt.plot(tz,cy, label='Cy[m]', color='blue',linestyle='-')\n # plt.plot(tz,cyp,label=\"Cy'[m]\",color='blue',linestyle=':')\n plt.plot(tz,sy, label='Sy[m]', color='red' ,linestyle='-')\n # plt.plot(tz,syp,label=\"Sy'[m]\",color='red' ,linestyle=':')\n vscale=plt.axis()[3]*0.1\n viseoy = [x*vscale for x in vis_ordinate]\n # for i,s in enumerate(vis_abszisse):\n # if s > stop_viseoy:\n # viseoy[i] = 0.\n plt.plot(vis_abszisse,viseoy,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='black')\n plt.legend(loc='lower right',fontsize='x-small')", "def main():\r\n plot = Plotter(0.5, 1.2)\r\n plot.plot_func()", "def plot(self, *args, **kwargs):\n pass", "def plot(n, p, psi):\n # plt.plot(psi_inf(n) ** 2, label=\"analytic\")\n c1 = \"black\"\n fig, ax1 = plt.subplots()\n ax1.plot(psi[n - 1] ** 2, label=r\"$n$ = %d\" % n, color=c1)\n ax1.set_xlabel(r\"$i$\")\n ax1.set_ylabel(r\"$|\\psi(x)|^2$\", color=c1)\n for t in ax1.get_yticklabels():\n t.set_color(c1)\n\n ax2 = ax1.twinx()\n c2 = \"#5b07ed\"\n pot = np.array([potential(i, p) for i in range(N)])\n ax2.plot(pot, label=\"potential\", color=c2, linewidth=4)\n ax2.set_ylabel(\"potential\", color=c2)\n for t in ax2.get_yticklabels():\n t.set_color(c2)\n\n ncols = 1 if n > 2 else 2\n # ask matplotlib for the plotted objects and their labels, from http://stackoverflow.com/a/10129461\n lines, labels = ax1.get_legend_handles_labels()\n lines2, labels2 = ax2.get_legend_handles_labels()\n ax2.legend(lines + lines2, labels + labels2, loc=\"upper center\", ncol=ncols)\n\n ylim = {1: 0.037, 2: 0.027}\n if n in ylim:\n ax1.set_ylim([0, ylim[n]])\n\n plt.title(r\"Time-independent Schrödinger: $n = %d$\" % n)\n plt.show()\n # plt.savefig(\"%s_%d\" % (p, n))\n plt.close()", "def plot_graph(self) -> None:", "def plot():\n xvals = np.arange(-50, 250, step=0.1)\n\n fig = plt.figure()\n plt.suptitle(\"Gaussian with smooth transition to power law\")\n\n A0vals = [10, 11]\n avals = [5*10**-3, 10**-3, 5*10**-4]\n ttvals = [10., 50., 100.]\n cvals = [-0.1, -0.9, -5./3., -4.]\n offset = [-30, 0.0, 30]\n\n paramvals = [A0vals, avals, ttvals,cvals, offset]\n titles, labels = return_parameter_names()\n\n nplots = len(paramvals)\n\n for i in range(nplots):\n plt.subplot(nplots, 1, i+1)\n vals = paramvals[i]\n for j in range(len(vals)):\n pset = list(default())\n pset[i] = 
vals[j]\n yvals=[]\n ypower=[]\n ypeak=[]\n for x in xvals:\n yvals.append(fitfunc(x, pset))\n ypeak.append(logpeak(x,pset))\n if x > 0:\n ypower.append(logpowerlaw(x,pset))\n label = labels[i] + \"=\"+str(vals[j])\n plt.plot(xvals, yvals, label = label)\n\n plt.title(titles[i])\n plt.legend()\n\n fig.set_size_inches(15, 30)\n plt.savefig(\"graphs/misc/lightcurve_models.pdf\")\n plt.close()" ]
[ "0.8243612", "0.80253506", "0.71199554", "0.6722876", "0.6595502", "0.6406169", "0.6404475", "0.63895744", "0.63739944", "0.6362752", "0.6319799", "0.62973255", "0.6270325", "0.6204418", "0.6188492", "0.6185035", "0.617978", "0.6163521", "0.61322105", "0.61197263", "0.6113812", "0.6103722", "0.60640526", "0.60615396", "0.6059148", "0.60523695", "0.60383075", "0.6014635", "0.5998445", "0.5968047" ]
0.820347
1
Transform bbox to rectangle which can be plot in a figure.
def bbox_to_rect(bbox, color): return plt.Rectangle( xy=(bbox[0], bbox[1]), width=bbox[2]-bbox[0], height=bbox[3]-bbox[1], fill=False, edgecolor=color, linewidth=2 )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def vis_bbox(im, bbox):\n im = im[:, :, (2, 1, 0)]\n fig, ax = plt.subplots(figsize=(12, 12))\n ax.imshow(im, aspect='equal')\n ax.add_patch(\n plt.Rectangle((bbox[0], bbox[1]),\n bbox[2] - bbox[0],\n bbox[3] - bbox[1], fill=False,\n edgecolor='red', linewidth=3.5)\n )\n\n plt.axis('off')\n plt.tight_layout()\n plt.draw()", "def to_bounding_box(self):\n if self.bbox is not None:\n return self.bbox\n from .bbox import BBox\n\n xx = self.xx\n yy = self.yy\n self.bbox = BBox(xmin=min(xx), xmax=max(xx), ymin=min(yy), ymax=max(yy), label=self.label, **self.fields)\n return self.bbox", "def _decode_bbox(self, normalized_bbox):\n #apply the inverse of transformation\n y1,x1,y2,x2 = preprocess.apply_transformation(normalized_bbox,\n np.linalg.inv(self.transformation))\n\n w,h = self.image_size\n y1,x1,y2,x2 = y1*h,x1*w,y2*h,x2*w\n return vot.Rectangle(x1,y1,x2-x1,y2-y1)", "def convert_bbox(bbox, width, height):\n min_x, min_y, max_x, max_y = bbox\n # scale X axis\n min_x *= width\n max_x *= width\n # invert Y axis and scale\n min_y = (1 - min_y) * height\n max_y = (1 - max_y) * height\n\n return min_x, min_y, max_x, max_y", "def bbox(\n bbox: Tuple[Coordinate, Coordinate] = ((-1.0, -1.0), (3.0, 4.0)),\n layer: Tuple[int, int] = (1, 0),\n top: float = 0,\n bottom: float = 0,\n left: float = 0,\n right: float = 0,\n) -> gf.Component:\n D = gf.Component()\n (xmin, ymin), (xmax, ymax) = bbox\n points = [\n [xmin - left, ymin - bottom],\n [xmax + right, ymin - bottom],\n [xmax + right, ymax + top],\n [xmin - left, ymax + top],\n ]\n D.add_polygon(points, layer=layer)\n return D", "def box_to_rect(box, color, linewidth=3):\r\n box = box.asnumpy()\r\n return plt.Rectangle(\r\n (box[0], box[1]), box[2] - box[0], box[3] - box[1],\r\n fill=False, edgecolor=color, linewidth=linewidth)", "def box_to_rect(box, color, linewidth=3):\n box = box.asnumpy()\n return plt.Rectangle(\n (box[0], box[1]), box[2] - box[0], box[3] - box[1],\n fill=False, edgecolor=color, linewidth=linewidth)", "def box_to_rect(box, color, linewidth=3):\r\n box = box.asnumpy()\r\n return plt.Rectangle(\r\n (box[0], box[1]), box[2]-box[0], box[3]-box[1],\r\n fill=False, edgecolor=color, linewidth=linewidth)", "def vis_bbox(image: Image, bbox, color=_GREEN, thick=1) -> Image:\n image = image.astype(np.uint8)\n bbox = list(map(int, bbox))\n x0, y0, x1, y1 = bbox\n cv2.rectangle(image, (x0, y0), (x1, y1), color, thickness=thick)\n return image", "def draw_bbox(img, bbox: list,\n gca_axes: SubplotBase = None,\n label: str = None,\n color: str = \"#00ffff\") -> SubplotBase:\n\n cx, cy, w, h = bbox\n\n if not gca_axes:\n gca_axes = plt.gca()\n\n top_bottom_pt = ((cx - w/2) * img.size[0],\n (cy - h/2) * img.size[1])\n\n gca_axes.add_patch(plt.Rectangle(top_bottom_pt,\n w * img.size[0], h * img.size[1],\n color=color, fill=False, linewidth=2))\n\n font = {'color': 'white',\n 'weight': 'bold',\n 'size': 16}\n\n if label:\n gca_axes.text(top_bottom_pt[0], top_bottom_pt[1], label,\n fontdict=font,\n bbox={'facecolor': '#00abab', 'alpha': 1})\n\n return gca_axes", "def bbox(bbox = [(-1, -1), (3, 4)], layer = 0):\n D = Device(name = 'bbox')\n (a,b), (c,d) = bbox\n points = ((a,b), (c,b), (c,d), (a,d))\n D.add_polygon(points, layer = layer)\n return D", "def get_boundingbox(face, width, height, scale=1.3, minsize=None):\n x1 = face.left()\n y1 = face.top()\n x2 = face.right()\n y2 = face.bottom()\n size_bb = int(max(x2 - x1, y2 - y1) * scale)\n if minsize:\n if size_bb < minsize:\n size_bb = minsize\n center_x, center_y = (x1 + x2) // 2, (y1 + y2) // 
2\n\n # Check for out of bounds, x-y top left corner\n x1 = max(int(center_x - size_bb // 2), 0)\n y1 = max(int(center_y - size_bb // 2), 0)\n # Check for too big bb size for given x, y\n size_bb = min(width - x1, size_bb)\n size_bb = min(height - y1, size_bb)\n\n return x1, y1, size_bb", "def canvas_bounds(self) -> utils.BoxRegion:", "def bbox(self, node):\n node_id = node.get('id')\n #inkex.utils.debug(\"Check if \" + str(node_id) + \" is in \" + str(self.node_info))\n info = self.node_info[node_id] \n \n x = info.x\n y = info.y\n width = info.width\n height = info.height\n\n return Box(Point(x, y),\n Point(x + width, y),\n Point(x + width, y + height),\n Point(x, y + height))", "def bbox(self):\n return self.canvas.bbox(self.boxitem)", "def vis_bbox_opencv(img, bbox, thick=1):\n (x0, y0, w, h) = bbox\n x1, y1 = int(x0 + w), int(y0 + h)\n x0, y0 = int(x0), int(y0)\n cv2.rectangle(img, (x0, y0), (x1, y1), _GREEN, thickness=thick)\n return img", "def bbox(self):\n lower = (self.x.min(), self.y.min())\n upper = (self.x.max(), self.y.max())\n return (lower, upper)", "def _boundRect(self):\n addresstamp = reduce(lambda x, y: x + y, [v.addresstamp for v in self.footprints])\n self.upperleft = list(map(min, zip(*addresstamp)))\n self.bottomright = list(map(max, zip(*addresstamp)))\n self.upperright = [self.bottomright[0], self.upperleft[1]]\n self.bottomleft = [self.upperleft[0], self.bottomright[1]]\n (self.width, self.height) = (self.upperright[0] - self.bottomleft[0], self.bottomleft[1] - self.upperright[1])\n assert self.width >= 0\n assert self.height >= 0\n self.center = [self.upperleft[0] + self.width / float(2), self.upperleft[1] + self.height / float(2)]\n self.corners = [self.upperright, self.bottomleft, self.upperleft, self.bottomright]", "def normalize_bbox(bbox: TBox, rows: int, cols: int) -> TBox:\n\n if rows <= 0:\n raise ValueError(\"Argument rows must be positive integer\")\n if cols <= 0:\n raise ValueError(\"Argument cols must be positive integer\")\n\n tail: Tuple[Any, ...]\n (x_min, y_min, x_max, y_max), tail = bbox[:4], tuple(bbox[4:])\n\n x_min, x_max = x_min / cols, x_max / cols\n y_min, y_max = y_min / rows, y_max / rows\n\n return cast(BoxType, (x_min, y_min, x_max, y_max) + tail) # type: ignore", "def boundingBox(self):\n minx, miny, maxx, maxy = self.substrates.bounds\n return pcbnew.BOX2I(\n pcbnew.VECTOR2I(int(minx), int(miny)),\n pcbnew.VECTOR2I(int(maxx - minx), int(maxy - miny)))", "def get_bbox(self, obj):\n renderer = self.figure.canvas.get_renderer()\n transformer = self.figure.dpi_scale_trans.inverted()\n return obj.get_window_extent(renderer=renderer).transformed(transformer)", "def bbox_transform(bbox):\n with tf.variable_scope('bbox_transform') as scope:\n cx, cy, w, h = bbox\n out_box = [[]]*4\n out_box[0] = cx-w/2\n out_box[1] = cy-h/2\n out_box[2] = cx+w/2\n out_box[3] = cy+h/2\n\n return out_box", "def bbox_format(self) -> bbox_utils.BBoxFormat:\n raise NotImplementedError", "def bbox_transform(bbox):\n with tf.variable_scope('bbox_transform') as scope:\n cx = bbox[..., 0]\n cy = bbox[..., 1]\n w = bbox[..., 2]\n h = bbox[..., 3]\n out_box = np.stack(\n [cx-w/2, cy-h/2, cx+w/2, cy+h/2],\n axis=-1\n )\n return out_box", "def _boundRect(self):\n self.upperleft = list(map(min, zip(*self.addresstamp)))\n self.bottomright = list(map(max, zip(*self.addresstamp)))\n self.upperright = [self.bottomright[0], self.upperleft[1]]\n self.bottomleft = [self.upperleft[0], self.bottomright[1]]\n (self.width, self.height) = (self.upperright[0] - self.bottomleft[0], 
self.bottomleft[1] - self.upperright[1])\n assert self.width >= 0\n assert self.height >= 0\n self.center = [self.upperleft[0] + self.width / float(2), self.upperleft[1] + self.height / float(2)]\n self.corners = [self.upperright, self.bottomleft, self.upperleft, self.bottomright]", "def getbbox(self):\n pass", "def bbox(self, *args):\n return self._canvas.bbox(*args)", "def update_bbox(self): \r\n \r\n centX, centY = self.center\r\n\r\n brush_thickness = self.brush[0]\r\n\r\n margin = self.__size + brush_thickness + BOUNDARY_MARGIN\r\n\r\n self.bbox = [int(centX - margin), int(centY - margin),\r\n int(centX + margin), int(centY + margin)]", "def bbox(self):\n return self.canvas.bbox(self.tags)", "def bbox(self):\n return self.get_bounding_box()" ]
[ "0.7295237", "0.71999186", "0.7162662", "0.7128504", "0.6923683", "0.68880653", "0.68832767", "0.68619215", "0.6833269", "0.6768016", "0.67155373", "0.6684205", "0.66383326", "0.6633823", "0.66184807", "0.6602729", "0.65662926", "0.6528647", "0.6516691", "0.6504845", "0.647212", "0.6456942", "0.6447501", "0.6417111", "0.6392997", "0.63848484", "0.63822657", "0.6346024", "0.63372993", "0.6327001" ]
0.8192315
0
Launch DagFileProcessorManager processor and start DAG parsing loop in manager.
def start(self) -> None: context = self._get_multiprocessing_context() self._last_parsing_stat_received_at = time.monotonic() self._parent_signal_conn, child_signal_conn = context.Pipe() process = context.Process( target=type(self)._run_processor_manager, args=( self._dag_directory, self._max_runs, self._processor_timeout, child_signal_conn, self._dag_ids, self._pickle_dags, self._async_mode, ), ) self._process = process process.start() self.log.info("Launched DagFileProcessorManager with pid: %s", process.pid)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def start(self):\n self.register_exit_signals()\n\n set_new_process_group()\n\n self.log.info(\"Processing files using up to %s processes at a time \", self._parallelism)\n self.log.info(\"Process each file at most once every %s seconds\", self._file_process_interval)\n self.log.info(\n \"Checking for new files in %s every %s seconds\", self._dag_directory, self.dag_dir_list_interval\n )\n\n return self._run_parsing_loop()", "def start_new_processes(self):\n # initialize cache to mutualize calls to Variable.get in DAGs\n # needs to be done before this process is forked to create the DAG parsing processes.\n SecretCache.init()\n\n while self._parallelism - len(self._processors) > 0 and self._file_path_queue:\n file_path = self._file_path_queue.popleft()\n # Stop creating duplicate processor i.e. processor with the same filepath\n if file_path in self._processors:\n continue\n\n callback_to_execute_for_file = self._callback_to_execute[file_path]\n processor = self._create_process(\n file_path,\n self._pickle_dags,\n self._dag_ids,\n self.get_dag_directory(),\n callback_to_execute_for_file,\n )\n\n del self._callback_to_execute[file_path]\n Stats.incr(\"dag_processing.processes\", tags={\"file_path\": file_path, \"action\": \"start\"})\n\n processor.start()\n self.log.debug(\"Started a process (PID: %s) to generate tasks for %s\", processor.pid, file_path)\n self._processors[file_path] = processor\n self.waitables[processor.waitable_handle] = processor\n\n Stats.gauge(\"dag_processing.file_path_queue_size\", len(self._file_path_queue))", "def start_processing(self,recurse = True):\n if isinstance(self.model_source, str) and self.auto_reload:\n self.observer = ChangeWatcher(self.model_source)\n \n if self.mapping_process is None:\n self.run_map.value = True\n self.mapping_process = multiprocessing.Process(target = self.mapping_runner)\n self.mapping_process.start()\n super(DNN2, self).start_processing(recurse)", "def _heartbeat_manager(self):\n if not self._parent_signal_conn:\n raise ValueError(\"Process not started.\")\n if self._process and not self._process.is_alive():\n self._process.join(timeout=0)\n if not self.done:\n self.log.warning(\n \"DagFileProcessorManager (PID=%d) exited with exit code %d - re-launching\",\n self._process.pid,\n self._process.exitcode,\n )\n self.start()\n\n if self.done:\n return\n\n parsing_stat_age = time.monotonic() - self._last_parsing_stat_received_at\n if parsing_stat_age > self._processor_timeout.total_seconds():\n Stats.incr(\"dag_processing.manager_stalls\")\n self.log.error(\n \"DagFileProcessorManager (PID=%d) last sent a heartbeat %.2f seconds ago! 
Restarting it\",\n self._process.pid,\n parsing_stat_age,\n )\n reap_process_group(self._process.pid, logger=self.log)\n self.start()", "def run_single_parsing_loop(self) -> None:\n if not self._parent_signal_conn or not self._process:\n raise ValueError(\"Process not started.\")\n if not self._process.is_alive():\n return\n\n try:\n self._parent_signal_conn.send(DagParsingSignal.AGENT_RUN_ONCE)\n except ConnectionError:\n # If this died cos of an error then we will noticed and restarted\n # when harvest_serialized_dags calls _heartbeat_manager.\n pass", "def __create_dag(self):\n dag_man = htc.DAGMan(\n filename=os.path.join(self.__job_dir, 'diamond.dag'),\n status_file=os.path.join(self.__job_dir, 'diamond.status'),\n dot='diamond.dot'\n )\n\n # layer 1 - ntuples\n ntuple_jobs = self.__create_ntuple_layer()\n for job in ntuple_jobs:\n dag_man.add_job(job, retry=RETRY_COUNT)\n\n # # layer 2 - analysis\n # for mode in ANALYSIS_MODES:\n # analysis_jobs = self.__create_analysis_layer(ntuple_jobs, mode)\n # for job in analysis_jobs:\n # dag_man.add_job(job, requires=ntuple_jobs, retry=RETRY_COUNT)\n # # layer 2b\n # # for each analysis mode create 1 merged file\n # merge_jobs = self.__create_merge_layer(analysis_jobs, mode)\n # for job in merge_jobs:\n # dag_man.add_job(job, requires=analysis_jobs, retry=2)\n\n self.__dag = dag_man", "def start_processing(self):", "def run(self, tag):\n pkg = PACKAGES.get(tag, None)\n if not pkg or not pkg.get('processor'):\n print \"Could not find package processor {}\".format(tag)\n sys.exit(1)\n\n pth = self._ensure_relative_directory('data/{}'.format(tag))\n processor_klass = pkg['processor']\n processor = processor_klass()\n processor.set_config(self.config, pth)\n processor.process()", "def run(self):\n operation_manager = self._core.get_operation_manager()\n while True:\n while operation_manager.process_next():\n pass\n sleep(2)", "def start_process(self):\n\n self.pre_process_information() # process the information based on config files\n\n self.get_config_file() # extract config file(yaml or string) data in self.main_file\n\n self.check_diff_as_arg()\n # if self.args.diff is true, check if snap file (yaml) details are provided and process it\n\n host_dict = (\n {}\n ) # an empty dictionary created in which we will store lists of hosts\n\n if self.args.pre_snapfile is not None:\n output_file = self.args.pre_snapfile\n elif self.args.snapcheck is True:\n output_file = \"snap_temp\"\n self.snap_del = True\n else:\n output_file = \"\"\n\n self.extract_device_information(\n host_dict\n ) # extract information from the config file or arguments\n\n config_data = self.main_file\n self.connect_multiple_device(host_dict, config_data, output_file)\n # connect to list of devices extracted in the host_dict", "def run(self):\n self._params = self.parsingcommands()\n self.start()", "def run_huawei_2g_parser(parent_dag_name, child_dag_name, start_date, schedule_interval):\n dag = DAG(\n '%s.%s' % (parent_dag_name, child_dag_name),\n schedule_interval=schedule_interval,\n start_date=start_date,\n )\n\n t23 = DummyOperator( task_id='branch_huawei_2g_parser', dag=dag)\n\n t29 = BashOperator(\n task_id='run_huawei_2g_xml_nbi_parser',\n bash_command='java -jar /mediation/bin/boda-huaweinbixmlparser.jar /mediation/data/cm/huawei/raw/nbi_gsm /mediation/data/cm/huawei/parsed/nbi_gsm /mediation/conf/cm/hua_cm_2g_nbi_parameters.cfg',\n dag=dag)\n\n t29_2 = BashOperator(\n task_id='run_huawei_2g_mml_parser',\n bash_command='java -jar /mediation/bin/boda-huaweimmlparser.jar 
/mediation/data/cm/huawei/raw/mml_gsm /mediation/data/cm/huawei/parsed/mml_gsm /mediation/conf/cm/hua_cm_2g_mml_parser.cfg',\n dag=dag)\n\n t29_3 = BashOperator(\n task_id='run_huawei_2g_xml_gexport_parser',\n bash_command='java -jar /mediation/bin/boda-huaweicmobjectparser.jar /mediation/data/cm/huawei/raw/gexport_gsm /mediation/data/cm/huawei/parsed/gexport_gsm /mediation/conf/cm/gexport_gsm_parser.cfg',\n dag=dag)\n\n t_join = DummyOperator(\n task_id='join_huawei_2g_parser',\n dag=dag,\n )\n\n dag.set_dependency('branch_huawei_2g_parser', 'run_huawei_2g_mml_parser')\n dag.set_dependency('branch_huawei_2g_parser', 'run_huawei_2g_xml_nbi_parser')\n dag.set_dependency('branch_huawei_2g_parser', 'run_huawei_2g_xml_gexport_parser')\n\n dag.set_dependency('run_huawei_2g_mml_parser', 'join_huawei_2g_parser')\n dag.set_dependency('run_huawei_2g_xml_nbi_parser', 'join_huawei_2g_parser')\n dag.set_dependency('run_huawei_2g_xml_gexport_parser', 'join_huawei_2g_parser')\n\n\n return dag", "def run_parse(self):\n # Data set already has source file names from load_inputs\n parsedset = {}\n parsedset['data_set'] = []\n for log in self.input_files:\n parsemodule = self.parse_modules[self.args.parser]\n try:\n if self.args.tzone:\n parsemodule.tzone = self.args.tzone\n except NameError: pass\n parsedset['data_set'].append(parsemodule.parse_file(log))\n self.data_set = parsedset\n del(parsedset)", "def run(self):\n\n step = self.steps['diagnostics_files']\n step.cores = self.config.getint('make_diagnostics_files', 'cores')\n\n # run the step\n super().run()", "def run(self):\n while self.container.process(): pass", "def run(self):\r\n __data__ = abspath(join(dirname( __file__ ), '..', 'data'))\r\n files = [ f for f in listdir(__data__) \r\n if isfile(join(__data__,f)) ]\r\n\r\n # Spawn processes\r\n pids = []\r\n for index, ts_name in enumerate(files):\r\n if ts_name == \".DS_Store\":\r\n \tcontinue\r\n\r\n __data__ = abspath(join(dirname( __file__ ), '..', 'data'))\r\n with open(join(__data__ + \"/\" + ts_name), 'r') as f:\r\n timeseries = json.loads(f.read())\r\n p = Process(target=run_algorithms, args=(timeseries, ts_name))\r\n pids.append(p)\r\n p.start()\r\n\r\n # Send wait signal to zombie processes\r\n for p in pids:\r\n p.join()", "def __start_process(self, subject, manager_data, ignore_inactive_db_check=False):\n\n if subject in self.autocontinue and (\n self.autosave.authorized or PyFunceble.CONFIGURATION.print_dots\n ):\n PyFunceble.LOGGER.info(f\"Skipped {subject!r}: already tested.\")\n print(\".\", end=\"\")\n else:\n original_config = PyFunceble.CONFIGURATION.copy()\n original_intern = PyFunceble.INTERN.copy()\n\n process = OurProcessWrapper(\n target=self.test,\n args=(\n subject,\n self.file_type,\n PyFunceble.LOADER,\n manager_data,\n original_intern,\n ignore_inactive_db_check,\n {\n \"api_file_generation\": PyFunceble.CONFIGURATION.db_type\n == \"json\",\n \"inactive_database\": False,\n \"auto_continue\": False,\n \"quiet\": PyFunceble.CONFIGURATION.quiet,\n },\n ),\n )\n process.name = f\"PyF {subject}\"\n process.start()\n\n PyFunceble.LOADER.config.update(original_config)\n PyFunceble.LOADER.inject_all()\n\n PyFunceble.INTERN.update(original_intern)\n\n return process\n\n return None", "def postprocess_dag(self, dag: networkx.DiGraph) -> networkx.DiGraph:\n return dag", "def _initJobs(self):\n super(DigestManager, self)._initJobs()\n conf = self.config.container_manager\n\n job4 = LoopingCall(self.performRequestedScan)\n job4.start(float(conf.activescan_interval))\n 
self.jobs.append(job4)", "def runScan(self):\n if os.path.isfile(self.scanProcessCard) == False:\n raise TypeError('Import valid scan process card!')\n runMG = GenerateProcesses()\n runMG.runMadGraph(self.scanProcessCard)", "def start_daemon(self):\n LOGGER.info(\"starting uploader daemon\")\n\n global RUN_STATE\n RUN_STATE.value = self.STATE_RUNNING\n\n # Record the start time of instantiation, so that we can report uptime\n self._start_time = time.time()\n\n # Create and start all workers\n self._workers = self._create_workers(start=True)\n LOGGER.debug(\"Started workers:\\n\\t%s\",\n \"\\n\\t\".join(sorted([w.name for w in self._workers])))", "def start(self): # pragma: no cover\n # Start the HAL and Fake Driver\n if self.hal_cmd_line:\n self.logger.info(\"Start the hal main process...\")\n process_obj = self.start_process(self.hal_cmd_line)\n self.hal_process = {\n \"process\": process_obj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.hal_process)\n\n # wait a period for process start and init complete\n time.sleep(self.PROCESS_INIT_PERIOD)\n if self.fake_driver_cmd_line:\n self.logger.info(\"Start the fake driver process...\")\n process_obj = self.start_process(self.fake_driver_cmd_line)\n self.fake_driver_process = {\n \"process\": process_obj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.fake_driver_process)\n\n for agent_name in self.agent_dict:\n self.logger.info(\"start agent process {}...\".format(agent_name))\n popenObj = self.start_process(self.agent_dict[agent_name])\n self.agent_obj[agent_name] = {\n \"process\": popenObj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.agent_obj[agent_name])\n\n # wait a period for agent start and init complete\n alive_status = False\n for timeout in range(self.WAITING_FOR_AGENT_STARTUP_RETRY):\n alive_status = ProcessAgent.is_all_agent_started()\n if not alive_status:\n time.sleep(1)\n if not alive_status:\n self.logger.error('Not all agent startup normally, reboot the system.')\n SysTools.sys_failure_reboot(reason='Not all agent startup')\n SysTools.diagnostic_self_test_fail('Communication error', 'Not all agent startup', 'Severity level=error')\n\n # start the manager process\n self.logger.info(\"Start the manager process...\")\n process_obj = self.start_process(self.manager_cmd_line)\n self.manager_process = {\n \"process\": process_obj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.manager_process)\n\n # start the fault_manager process\n self.logger.info(\"Start the fault manager process...\")\n process_obj = self.start_process(self.fault_manager_cmd_line)\n self.manager_process = {\n \"process\": process_obj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.manager_process)\n\n if self.ptp_driver_cmd_line:\n self.logger.info(\"Start the ptp driver client process...\")\n process_obj = self.start_process(self.ptp_driver_cmd_line)\n self.ptp_driver_process = {\n \"process\": process_obj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.ptp_driver_process)\n\n if self.res_hal_cmd_line:\n self.logger.info(\"Start the resource hal client process...\")\n process_obj = self.start_process(self.res_hal_cmd_line)\n self.res_driver_process = {\n \"process\": process_obj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.res_driver_process)\n\n if self.ssd_driver_cmd_line:\n self.logger.info(\"Start the ssd driver client 
process...\")\n process_obj = self.start_process(self.ssd_driver_cmd_line)\n self.ssd_driver_process = {\n \"process\": process_obj,\n \"retries\": 0,\n }\n manager_debugability.debugability_process_monitor(self.ssd_driver_process)\n\n while True:\n time.sleep(5)\n # monitor the all process\n manager_debugability.debugability_traceback()\n\n # monitor the manager process, will not retry....\n if self.manager_process is not None and self.manager_process['process'] is None:\n self.logger.error(\n \"Manager process is not up, reboot the system.\")\n if self.simulator_flag:\n sys.exit(-1)\n else:\n SysTools.sys_failure_reboot(reason=\"Manager process is not up\")\n SysTools.diagnostic_self_test_fail('Processing error', 'Manager process is not up',\n 'Severity level=error')\n\n for agent in self.agent_obj:\n # check if agent instance create succeed, retry if failure\n if None is self.agent_obj[agent][\"process\"]:\n if self.agent_obj[agent][\"retries\"] < self.AGENT_RETRIES_MAX:\n self.logger.warn(\n 'Agent %s retries %d times', agent, self.agent_obj[agent][\"retries\"])\n self.agent_obj[agent][\"process\"] = self.start_process(self.agent_dict[agent_name])\n self.agent_obj[agent][\"retries\"] += 1\n self.logger.warn('Agent %s retries %d times', agent, self.agent_obj[agent][\"retries\"])\n manager_debugability.debugability_process_monitor(self.agent_obj[agent])\n continue\n else:\n # FixMe: reboot system or ?\n self.logger.error('Agent %s retries times exceed, will reboot...', agent)\n SysTools.sys_failure_reboot(reason=\"Agent {0} retries times exceed\".format(agent))\n SysTools.diagnostic_self_test_fail('Communication error',\n \"Agent {0} retries times exceed\".format(agent),\n 'Severity level=error')\n\n if self.check_process_status(self.agent_obj[agent][\"process\"]) != self.PROCESSSTATE_ALIVE:\n self.logger.error(\n '%s process is dead, reboot the system.', agent)\n # FixMe: reboot system or restart agent\n SysTools.sys_failure_reboot(reason=\"{0} process is dead\".format(agent))\n SysTools.diagnostic_self_test_fail('Processing error', \"{0} process is dead\".format(agent),\n 'Severity level=error')\n # check other critical processes\n if self.ptp_driver_cmd_line:\n if self.check_process_status(self.ptp_driver_process[\"process\"]) != self.PROCESSSTATE_ALIVE:\n self.logger.error(\"ptp hal driver process is dead\")\n SysTools.sys_failure_reboot(reason=\"ptp hal driver process is dead\")\n SysTools.diagnostic_self_test_fail('Processing error', \"ptp hal driver process is dead\",\n 'Severity level=error')", "def run():\n logger.info(f\"Process started:\")\n logger.info(f\"Converting Glove file to Word2Vec format\")\n convert_to_word2vec.convert(\n \"./data/source/glove.6B.50d.txt\", \"./data/source/glove.6B.50d.w2vformat.txt\"\n )\n\n logger.info(f\"Extracting Click Stream data\")\n extract_click_stream_data()\n\n logger.info(\"Extracting Wiki articles\")\n extract_wiki_articles()\n\n logger.info(f\"Generating Clickstream dataset\")\n generate_datasets()\n\n logger.info(\"Tokenizing articles\")\n WikiArticlesTokenizer().process()\n\n logger.info(\"Creating dataset with Wiki Articles\")\n create_wiki_articles_dataset()", "def submit_dag(self):\n os.chdir(self.production.rundir)\n os.system(\"cat *_local.cache > local.cache\")\n\n for psdfile in self.production.get_psds(\"xml\"):\n ifo = psdfile.split(\"/\")[-1].split(\"_\")[1].split(\".\")[0]\n os.system(f\"cp {psdfile} {ifo}-psd.xml.gz\")\n\n\n self.before_submit()\n \n try:\n command = [\"condor_submit_dag\", \n \"-batch-name\", 
f\"rift/{self.production.event.name}/{self.production.name}\",\n os.path.join(self.production.rundir, \"marginalize_intrinsic_parameters_BasicIterationWorkflow.dag\")]\n dagman = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n self.logger.info(command, production = self.production)\n except FileNotFoundError as error:\n raise PipelineException(\"It looks like condor isn't installed on this system.\\n\"\n f\"\"\"I wanted to run {\" \".join(command)}.\"\"\")\n\n stdout, stderr = dagman.communicate()\n\n\n if \"submitted to cluster\" in str(stdout):\n cluster = re.search(\"submitted to cluster ([\\d]+)\", str(stdout)).groups()[0]\n self.production.status = \"running\"\n self.production.job_id = int(cluster)\n return cluster, PipelineLogger(stdout)\n else:\n raise PipelineException(f\"The DAG file could not be submitted.\\n\\n{stdout}\\n\\n{stderr}\",\n issue=self.production.event.issue_object,\n production=self.production.name)", "def local_parser(self, fl_lst):\n print(\"Running Local Parser ...\\n\")\n self.report_exec(local=True)\n time.sleep(0.5)\n\n for file in tqdm(fl_lst):\n if not file.endswith(\".pdf\"):\n self.logger.warning(\"Invalid File\", file)\n continue\n self.logger.info(\"Working on %s\" % file)\n self.file_cnt += 1\n\n try:\n info = self.translate_pdf(filename=file, filestream=None, fs_team_id=None)\n self.update_res_to_cache(info)\n self.suc_cnt += 1\n except Exception as err:\n self.failed_list.append(file)\n self.logger.error(\"[ERROR] %s\" % err)\n continue\n self.logger.info(\"Parser Finished for %s\" % file)\n\n self.cache_to_json()\n self.report_del()", "def _start(self, arbiter):\n self.transport_manager.start()\n for execution_manager in self.execution_managers:\n execution_manager.start()", "def start(self, *args):\n if args[0] == 'all':\n params = args[1:]\n for x in self.processers.keys():\n cmd = ['python', 'processmgr.py']\n cmd.append(x.replace('process', ''))\n cmd.extend(params)\n p = subprocess.Popen(cmd,\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=False)\n self.processers[x] = p\n print 'run %s' % x\n else:\n cmd = ['python', 'processmgr.py']\n cmd.extend(args)\n p = subprocess.Popen(cmd,\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n shell=False)\n \n self.processers['process%s' % args[0]] = p\n print 'run process%s.' % args[0]", "def run_manager(self) -> None:\n \n for p in self.process_list:\n try:\n p.daemon = True\n p.start()\n except:\n self.process_list.remove(p)\n p = Process(target=self.multiprocessing_job, args=(self.process_job,))\n p.daemon = True\n self.process_list.append(p)\n p.start()\n #Every 1 seconds, check for active Processes.\n while True:\n sleep(1)\n running = any(p.is_alive() for p in self.process_list)\n if not running or not active_children:\n self.restart_required = True\n break\n self.logger.info(self.name + \" has finished managing.\")", "def dispatch(self, filename):\n\n parser = self.find_parser(filename)\n if parser:\n parser.tell({\n 'command': 'parse',\n 'filename': filename\n })\n else:\n log.info('No parser for filename: {}'.format(filename))", "def run_project_parser(self):\n\n # get Ansible project structure\n self.__get_ansible_project_content()\n self.__generate_graph('project', self.__project_content)\n\n # get Ansible roles\n self.__get_ansible_roles_content()\n self.__generate_graph('roles', self.__role_content)" ]
[ "0.6902342", "0.59053934", "0.5773313", "0.55288255", "0.54555", "0.5432372", "0.5419414", "0.52692837", "0.5262612", "0.52342904", "0.5214548", "0.5171435", "0.5146328", "0.50828224", "0.50824994", "0.50663066", "0.50369257", "0.500083", "0.4956117", "0.49324197", "0.49278468", "0.4917471", "0.49136367", "0.49047726", "0.48984644", "0.4894575", "0.48795173", "0.4872358", "0.48716962", "0.48566622" ]
0.77809423
0
Heartbeat DAG file processor and restart it if we are not done.
def _heartbeat_manager(self):
    if not self._parent_signal_conn:
        raise ValueError("Process not started.")
    if self._process and not self._process.is_alive():
        self._process.join(timeout=0)
        if not self.done:
            self.log.warning(
                "DagFileProcessorManager (PID=%d) exited with exit code %d - re-launching",
                self._process.pid,
                self._process.exitcode,
            )
            self.start()

    if self.done:
        return

    parsing_stat_age = time.monotonic() - self._last_parsing_stat_received_at
    if parsing_stat_age > self._processor_timeout.total_seconds():
        Stats.incr("dag_processing.manager_stalls")
        self.log.error(
            "DagFileProcessorManager (PID=%d) last sent a heartbeat %.2f seconds ago! Restarting it",
            self._process.pid,
            parsing_stat_age,
        )
        reap_process_group(self._process.pid, logger=self.log)
        self.start()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def startHeartbeat(self):\n self.post.__sendHeartbeat()", "def _heartbeat_loop(self):\n # set last time so that \"if t_now - t_last >= HEARTBEAT_LOG_INTERVAL\"\n # below evalutes to True on the first run\n t_last = time.time() - HEARTBEAT_LOG_INTERVAL - 1\n while True:\n alive = 0\n # count alive processes \n for p in PROCESSES:\n if p.is_alive():\n alive += 1\n\n # no processes are alive - exit heartbeat loop\n if alive == 0:\n return\n\n t_now = time.time()\n if t_now - t_last >= HEARTBEAT_LOG_INTERVAL:\n # log heartbeat\n obj = { \n 'timestamp': time.time(),\n 'child_procs_total': self._procs_total,\n 'child_procs_alive': alive,\n 'probe_req_queue_len': self._probe_request_queue.qsize(),\n 'probe_resp_queue_len': \\\n self._probe_response_queue.qsize(), \n }\n \n # push to shared mem\n self._sm.set(config.BASE['SHARED_MEM_HEARTBEAT_KEY'],\n json.dumps(obj), HEARTBEAT_TTL)\n LOG.debug('pushed a heartbeat to the shared memory')\n\n t_last = t_now\n\n time.sleep(HEARTBEAT_LOOP_INTERVAL)", "def run(self):\n self.workhorse_.run()\n try:\n while(True):\n self.workhorse_.heartbeat()\n self.periodic_snapshot()\n except workflow.NoMoreWork:\n print \"Fini.\"\n exit(0)\n exit(-1)", "def _worker():\n try:\n logger.info('Looping...')\n temp_list = []\n for file in ['data_unfcc.csv','data_ebal.csv']:\n temp_list.append(os.path.isfile(file))\n if not all(temp_list):\n print('Starting from scratch...')\n download_source()\n create_database()\n create_index()\n\n time_mod = datetime.strptime(time.ctime(os.stat('data_ebal.csv').st_mtime),'%a %b %d %H:%M:%S %Y')\n time_now = datetime.now()\n\n if (time_now - time_mod).seconds > 3600:\n download_source()\n get_updated_records('unfcc')\n get_updated_records('ebal')\n create_index()\n except Exception as e:\n logger.warning('Main Loop error')", "def heartbeat_process(client_id):\n asyncio.run(Client.heartbeat(client_id))", "def heartbeat(self) -> None:\n if not self._parent_signal_conn:\n raise ValueError(\"Process not started.\")\n # Receive any pending messages before checking if the process has exited.\n while self._parent_signal_conn.poll(timeout=0.01):\n try:\n result = self._parent_signal_conn.recv()\n except (EOFError, ConnectionError):\n break\n self._process_message(result)\n\n # If it died unexpectedly restart the manager process\n self._heartbeat_manager()", "def run(self):\n if self.worker_is_alive:\n self.master_heartbeat_thread.join()", "def __sendHeartbeat(self):\n \n while not rospy.is_shutdown():\n rospy.sleep(5)\n self.setOutput(self.write_start+1,0)", "def start_watcher():\n while True:\n request_date = datetime.datetime.utcnow().strftime(\"%Y%m%d\")\n pull_request_from_remote(remote_files=\"*%s*\" % request_date)\n new_requests = check_for_new_request(request_date=request_date)\n if not new_requests:\n time.sleep(5)\n continue\n\n # noinspection PyTypeChecker\n for r in new_requests:\n print(\"Processing %s\" % r)\n try:\n ret = process_new_request(r, request_date=request_date,\n add2db=True)\n print(ret)\n except:\n os.system('cp -r %s /home/sedm/growth_marshal/archived/failed/'\n % r)\n os.system('cp -r %s /home/sedm/growth_marshal/archived/%s/' %\n (r, request_date))\n\n print(\"Waiting %ss before checking for new request\" % 5)\n time.sleep(5)", "def heartbeat_callback(self, session: Session = NEW_SESSION) -> None:\n if self.terminating:\n # ensure termination if processes are created later\n self.task_runner.terminate()\n return\n\n self.task_instance.refresh_from_db()\n ti = self.task_instance\n\n if ti.state == 
TaskInstanceState.RUNNING:\n fqdn = get_hostname()\n same_hostname = fqdn == ti.hostname\n if not same_hostname:\n self.log.error(\n \"The recorded hostname %s does not match this instance's hostname %s\",\n ti.hostname,\n fqdn,\n )\n raise AirflowException(\"Hostname of job runner does not match\")\n current_pid = self.task_runner.get_process_pid()\n recorded_pid = ti.pid\n same_process = recorded_pid == current_pid\n\n if recorded_pid is not None and (ti.run_as_user or self.task_runner.run_as_user):\n # when running as another user, compare the task runner pid to the parent of\n # the recorded pid because user delegation becomes an extra process level.\n # However, if recorded_pid is None, pass that through as it signals the task\n # runner process has already completed and been cleared out. `psutil.Process`\n # uses the current process if the parameter is None, which is not what is intended\n # for comparison.\n recorded_pid = psutil.Process(ti.pid).ppid()\n same_process = recorded_pid == current_pid\n\n if recorded_pid is not None and not same_process and not IS_WINDOWS:\n self.log.warning(\n \"Recorded pid %s does not match the current pid %s\", recorded_pid, current_pid\n )\n raise AirflowException(\"PID of job runner does not match\")\n elif self.task_runner.return_code() is None and hasattr(self.task_runner, \"process\"):\n if ti.state == TaskInstanceState.SKIPPED:\n # A DagRun timeout will cause tasks to be externally marked as skipped.\n dagrun = ti.get_dagrun(session=session)\n execution_time = (dagrun.end_date or timezone.utcnow()) - dagrun.start_date\n if ti.task.dag is not None:\n dagrun_timeout = ti.task.dag.dagrun_timeout\n else:\n dagrun_timeout = None\n if dagrun_timeout and execution_time > dagrun_timeout:\n self.log.warning(\"DagRun timed out after %s.\", execution_time)\n\n # potential race condition, the _run_raw_task commits `success` or other state\n # but task_runner does not exit right away due to slow process shutdown or any other reasons\n # let's do a throttle here, if the above case is true, the handle_task_exit will handle it\n if self._state_change_checks >= 1: # defer to next round of heartbeat\n self.log.warning(\n \"State of this instance has been externally set to %s. 
Terminating instance.\", ti.state\n )\n self.terminating = True\n self._state_change_checks += 1", "def start(self, exceptions):\n if not self._interval:\n return\n with self._lock:\n self._running.set()\n self._threshold = 0\n self._reads_since_check = 0\n self._writes_since_check = 0\n self._exceptions = exceptions\n LOGGER.debug('Heartbeat Checker Started')\n self._start_new_timer()", "def run(self):\n \n # Loop through all checkers to do an initial state check\n for checker in self.checkers:\n checker.update_last_state()\n\n # Send initial heartbeat\n self._send_heartbeat()\n \n # Main loop\n while True: \n html = \"\"\n for checker in self.checkers:\n if checker.just_changed_state():\n log.warn(\"Checker {} has changed state.\"\n .format(checker.name))\n html += \"<li>\" + checker.html() + \"</li>\\n\"\n \n if isinstance(checker, Process) and checker.state() == FAIL:\n log.warn(\"Process {} is not running.\"\n .format(checker.name))\n html += (\"<li>Attempting to restart \" + \n escape(checker.name) + \"...</li>\\n\")\n try:\n checker.restart()\n except MaxRetriesError, e:\n self.shutdown_reason = str(e)\n return\n time.sleep(5)\n html += (\"<li>State after restart: \" + \n checker.html() + \"</li>\\n\")\n\n if html:\n html = \"<h2>STATE CHANGED:</h2>\\n<ul>\\n\" + html + \"</ul>\\n\" \n html += self.html()\n html += run_commands(self.state_change_cmds)\n self.send_email_with_time(html=html,\n subject=\"Babysitter detected\"\n \" state change.\")\n\n if self._need_to_send_heartbeat():\n self._send_heartbeat()\n\n # Check if a new data subdir has been created\n if self.base_data_dir and self.sub_data_dir:\n if self._find_last_numeric_subdir() != self.sub_data_dir:\n self._send_heartbeat(\"<p>New subdir found so about to restart \"\n \"babysitter. 
Below are the last stats \"\n \"for the old data subdirectory.</p>\\n\")\n raise NewDataDirError()\n \n time.sleep(UPDATE_PERIOD)", "def start(self):\n self.register_exit_signals()\n\n set_new_process_group()\n\n self.log.info(\"Processing files using up to %s processes at a time \", self._parallelism)\n self.log.info(\"Process each file at most once every %s seconds\", self._file_process_interval)\n self.log.info(\n \"Checking for new files in %s every %s seconds\", self._dag_directory, self.dag_dir_list_interval\n )\n\n return self._run_parsing_loop()", "def watchdog(self):\n pass", "def restart():\n info = request.get_json() or {}\n delay_secs = int(info.get('delay', 0))\n\n t = threading.Timer(delay_secs, update_trigger_file)\n t.start()\n\n return jsonify('Success')", "def heartbeat(path, tags, args, source):\n\tprint \"---------------\"\n\tprint path\n\tprint args[0]\n\tprint args[1]\n\tprint args[2]\n\tprint args[3]", "def _monitor_loop(self):\n while self._continue_running():\n for wl in self._workloads:\n if not wl.running():\n self.log.info('%-20s FAILED', wl.name())\n self._restart_workload(wl)\n else:\n self.log.info('%-20s OK', wl.name())\n\n time.sleep(self._monitor_delay)", "def _restart(self):\n pass", "def after_make_runoff_file(msg, config, checklist):\n next_workers = {\"crash\": [], \"failure\": [], \"success\": []}\n return next_workers[msg.type]", "def start_processing(self):", "def startworking():\r\n #In the future have the manager program or from the website implement this arguments to a route\r\n #the program will download the file from the website\r\n global exe_name\r\n global Task_Conditional\r\n task_data = None\r\n while task_data is None:\r\n task_data = recieve_data_from_server(\"get_task\")\r\n if task_data is None:\r\n time.sleep(5)\r\n else:\r\n exe_name = task_data[\"exe_name\"]\r\n print('Working on the task \"{}\"'.format(exe_name))\r\n get_file(exe_name)\r\n Task_Conditional = task_data[\"Task_conditional\"]\r\n print(\"loading\")\r\n t1 = time.time()\r\n task_divider(task_data[\"first_num\"], task_data[\"last_num\"])\r\n t2 = time.time()\r\n print(\"ready {}\".format(t2-t1))", "def daemon_job(interval):\n time.sleep(3) # Wait for api server to start first\n while True:\n try:\n crawl()\n process_notification()\n except Exception:\n traceback.print_exc()\n time.sleep(interval)", "def heartbeat_event(self, event):\r\n while not self.imm_jobs.empty():\r\n imm_job = self.imm_jobs.get_nowait()\r\n imm_job(self)\r\n \r\n if self.do_reconfigure:\r\n self.selmgr.reconfigure(self.current_consensus())\r\n self.do_reconfigure = False\r\n \r\n if self.run_all_jobs:\r\n while not self.low_prio_jobs.empty() and self.run_all_jobs:\r\n imm_job = self.low_prio_jobs.get_nowait()\r\n imm_job(self)\r\n self.run_all_jobs = False\r\n return\r\n\r\n # If event is stream:NEW*/DETACHED or circ BUILT/FAILED, \r\n # don't run low prio jobs.. 
No need to delay streams for them.\r\n if PathBuilder.is_urgent_event(event): return\r\n \r\n # Do the low prio jobs one at a time in case a \r\n # higher priority event is queued \r\n if not self.low_prio_jobs.empty():\r\n delay_job = self.low_prio_jobs.get_nowait()\r\n delay_job(self)", "def startScandir(self):\n while self.isAlive:\n files = self.getNewFiles(self.inbox)\n while len(files) > 0:\n for full_filename in files:\n try:\n self.workflow.processFile(full_filename, 'new')\n except:\n et, ev, tb = sys.exc_info()\n serviceconfig.logger.error('got exception during the processing of the new file \"%s\"\\n\"%s\"' % (full_filename, str(ev)))\n serviceconfig.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))\n serviceconfig.sendMail('ERROR', 'File Processing FAILURE: %s' % str(et), 'Exception generated during the processing of the new file \"%s\":\\n%s\\n%s' % (full_filename, str(ev), ''.join(traceback.format_exception(et, ev, tb))))\n self.reportAction(full_filename, 'failure', str(et))\n files = self.getNewFiles(self.inbox)\n if self.timeout > 0:\n count = (self.timeout*60) / 10\n i = 0\n try:\n while self.isAlive:\n time.sleep(10)\n i = i+1\n if i >= count:\n break\n except:\n et, ev, tb = sys.exc_info()\n serviceconfig.logger.error('got Sleep exception \"%s\"' % str(ev))\n serviceconfig.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))\n serviceconfig.sendMail('ERROR', 'Sleep Processing FAILURE: %s' % str(et), 'Exception generated during the sleep process:\\n%s\\n%s' % (str(ev), ''.join(traceback.format_exception(et, ev, tb))))\n else:\n self.isAlive = False\n serviceconfig.logger.info('No more files to process. Exiting...')", "def test_heartbeat(self):\n pass", "def handle_heartbeat_response(self, message):\n self.directory.get(message.origin_node.get_name()).heartbeat()", "def _heart_beat(job_id, address, parent_pid=-1, log_file=\"\", wait_sec=45):\n while True:\n status = get_job_status(parent_pid, os.getpid())\n if os.path.exists(log_file):\n with open(log_file) as f:\n status[\"log_file\"] = f.read()\n _send_zmq_msg(job_id, \"heart_beat\", status, address)\n time.sleep(wait_sec)", "def onPreFork(self):", "def reprocess_all_feeds():\n logger.debug(\"Executing the heartbeat task and returning\")\n celery.send_task('process_all_rss', kwargs={'reprocess': True})\n return render_template('index.html', name=\"HEARTBEAT\")", "def after_make_v202111_runoff_file(msg, config, checklist):\n next_workers = {\"crash\": [], \"failure\": [], \"success\": []}\n return next_workers[msg.type]" ]
[ "0.59002155", "0.57033646", "0.56694525", "0.5598043", "0.55476713", "0.5539027", "0.5517709", "0.5490486", "0.546607", "0.5438334", "0.5388866", "0.5387239", "0.5341168", "0.53241", "0.52901435", "0.5288941", "0.5280215", "0.52654564", "0.5257743", "0.5253895", "0.5241949", "0.5238851", "0.5232418", "0.5224055", "0.520765", "0.52055943", "0.51674336", "0.51597816", "0.51573515", "0.515682" ]
0.64968365
0
Sync metadata from stat queue and only keep the latest stat.
def _sync_metadata(self, stat):
    self._done = stat.done
    self._all_files_processed = stat.all_files_processed
    self._last_parsing_stat_received_at = time.monotonic()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _push(self):\n if len(self._stat_now):\n self._stat_now['epoch_num'] = self.epoch_num\n self._stat_now['global_step'] = self.global_step\n\n self._stats.append(self._stat_now)\n self._stat_now = {}\n self._write_stat()", "def sync() -> None:", "def sync_update(self):\n for rec in self:\n if rec.ks_last_exported_date and rec.ks_sync_date:\n ks_reduced_ks_sync_time = rec.ks_last_exported_date - datetime.timedelta(seconds=30)\n ks_increased_ks_sync_time = rec.ks_last_exported_date + datetime.timedelta(seconds=30)\n if rec.ks_sync_date > ks_reduced_ks_sync_time and rec.ks_sync_date < ks_increased_ks_sync_time:\n rec.ks_sync_status = True\n else:\n rec.ks_sync_status = False\n else:\n rec.ks_sync_status = False", "def update_one_queue(queue):\n conn = pbs.pbs_connect(queue.server.name.encode('iso-8859-1', 'replace'))\n if conn==-1:\n logging.error(\"Cannot connect to %s - live data will be missing\" % server.name)\n return\n statqueues = pbs.pbs_statque(conn, queue.name.encode('iso-8859-1', 'replace') , [], \"\")\n pbs.pbs_disconnect(conn)\n if len(statqueues)==0:\n logging.error(\"pbs_statque failed for queue: %s\" % queue.name)\n return\n if len(statqueues)>1:\n logging.warning(\"pbs_statque returned more than one records for queue: %s\" % queue.name)\n\n attr_dict = dict([ (x.name,x.value) for x in statqueues[0].attribs])\n update_one_queue_from_pbs_data(queue, attr_dict)\n queue.save()", "def update_statistics(status):\n if not os.path.isfile(CONFIG['stats_file']):\n current_stats = {}\n else:\n current_stats = json.loads(open(CONFIG['stats_file'], 'r').read())\n # current_stats = delete_old_statistics(current_stats)\n\n current_key = int(datetime.datetime.now().strftime('%Y%m%d%H%M'))\n for host, state in ((h['host'], h['status']) for h in status):\n if host not in current_stats:\n current_stats[host] = {}\n\n # get newest entry of host\n newest_state = None, None\n for key, entry in current_stats[host].items():\n if newest_state[0] is None or int(key) > int(newest_state[0]):\n newest_state = key, entry\n if newest_state[1] != state:\n # state has changed. 
Write it.\n current_stats[host][current_key] = state\n\n # write stats\n open(CONFIG['stats_file'], 'w').write(json.dumps(current_stats))", "def stat_file(consumer_q, output_q):\n while True:\n fname = consumer_q.get()\n if fname == None:\n break\n\n # Get the modification time of the file and place the file name and\n # the mtime on the output queue.\n output_q.put( (fname, os.path.getmtime(fname)) )\n\n consumer_q.task_done()", "def _pre_sync(self):", "def refresh(self, **kwargs):\n traverse_setter(self, '_force', True)\n key = self.current_key if self.current_key else self.keys[0]\n stream_params = stream_parameters(self.streams)\n key = tuple(None if d in stream_params else k\n for d, k in zip(self.dimensions, key))\n stream_key = util.wrap_tuple_streams(key, self.dimensions, self.streams)\n self.update(stream_key)\n if self.comm is not None:\n self.push()", "def sync(self):\n pass", "def sync(self):\n pass", "def _post_sync(self):", "def do_sync(self):\n raise NotImplementedError() # pragma: no cover", "def sync_batch_stats(state: TrainState) -> TrainState:\n # Each device has its own version of the running average batch\n # statistics and those are synced before evaluation\n return state.replace(batch_stats=cross_replica_mean(state.batch_stats))", "def queueStatusAll():", "def finish_stager_tasks(self):\n\n update_files = {}\n messages = []\n while not self.finished_queue.empty():\n file = self.finished_queue.get()\n update_files[file['content_id']] = {'status': ContentStatus.AVAILABLE,\n 'pfn_size': file['pfn_size'],\n 'pfn': file['pfn']}\n msg = {'event_type': 'FILE_AVAILABLE',\n 'payload': {'scope': file['scope'],\n 'name': file['name'],\n 'startEvent': file['min_id'],\n 'lastEvent': file['max_id'],\n 'pfn': file['pfn']},\n 'created_at': date_to_str(datetime.datetime.utcnow())}\n messages.append(msg)\n\n self.logger.info('Got %s staged outputs' % len(update_files))\n update_contents_by_id(update_files)\n\n if self.send_messaging:\n for msg in messages:\n self.messaging_queue.put(msg)", "def sync(self, **kwargs):\n pass", "def sync_entry(self, entry):", "def update_queue_info(self):\n _logme.log('Updating queue_info', 'debug')\n queue_info1 = self.queue[self.id]\n self.queue.update()\n queue_info2 = self.queue[self.id]\n if queue_info2:\n self.queue_info = queue_info2\n elif queue_info1:\n self.queue_info = queue_info1\n elif self.queue_info is None and self.submitted:\n _logme.log('Cannot find self in the queue and queue_info is empty',\n 'warn')\n return self.queue_info", "def compress_pending_metadata_updates(key):\n instance = key.get()\n if not instance:\n logging.warning('Instance does not exist: %s', key)\n return\n\n if instance.active_metadata_update:\n logging.warning('Instance already has active metadata update: %s', key)\n return\n\n if not instance.pending_metadata_updates:\n return\n\n instance.active_metadata_update = compress_metadata_updates(\n instance.pending_metadata_updates)\n instance.pending_metadata_updates = []\n instance.put()", "async def async_update(self):\n _LOGGER.debug(\"%s: updating statistics\", self.entity_id)\n if self._samples_max_age is not None:\n self._purge_old()\n\n self._update_characteristics()\n\n # If max_age is set, ensure to update again after the defined interval.\n next_to_purge_timestamp = self._next_to_purge_timestamp()\n if next_to_purge_timestamp:\n _LOGGER.debug(\n \"%s: scheduling update at %s\", self.entity_id, next_to_purge_timestamp\n )\n if self._update_listener:\n self._update_listener()\n self._update_listener = None\n\n 
@callback\n def _scheduled_update(now):\n \"\"\"Timer callback for sensor update.\"\"\"\n _LOGGER.debug(\"%s: executing scheduled update\", self.entity_id)\n self.async_schedule_update_ha_state(True)\n self._update_listener = None\n\n self._update_listener = async_track_point_in_utc_time(\n self.hass, _scheduled_update, next_to_purge_timestamp\n )", "def update_link_statistics(self):\n if (self.track):\n key = self.id + \":\" + self.source + \"->\" + self.destination + \":\" \\\n + globals.BUFFEROCCUPANCY\n globals.statistics[key][globals.systime] = self.buffersize", "def qstat(self, *options):\n if self.in_queue():\n jobid = self.get_db('jobid')\n cmd = ['qstat'] + list(options) + [jobid]\n\n status, output, err = getstatusoutput(cmd,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n if status == 0:\n print(output)\n else:\n print(output + err)\n else:\n print('{} not in queue.'.format(self.directory))", "def transfer_progress(self, stats):", "def sync(self):\n return", "def update_on_demand_queue(cfg):\n\n # temp storage of all sprites to update\n update_list = list()\n\n while len(update_queue.update_queue) > 0:\n next_sprite = update_queue.update_queue.pop()\n update_list.append(next_sprite)\n #print(\"[update_on_demand_queue] Found in on demand queue:\", next_sprite.name)\n\n #print(\"[update_on_demand_queue] Updating on demand queue with contents:\", update_list)\n\n for s in update_list:\n s.update()", "def _sync(self):\n if self._conf.start_optime:\n # TODO optimize\n log.info(\"locating oplog, it will take a while\")\n oplog_start = self._conf.start_optime\n doc = self._src.client()['local']['oplog.rs'].find_one({'ts': {'$gte': oplog_start}})\n if not doc:\n log.error('no oplogs newer than the specified oplog')\n return\n oplog_start = doc['ts']\n log.info('start timestamp is %s actually' % oplog_start)\n self._last_optime = oplog_start\n self._sync_oplog(oplog_start)\n else:\n oplog_start = get_optime(self._src.client())\n if not oplog_start:\n log.error('get oplog_start failed, terminate')\n sys.exit(1)\n self._last_optime = oplog_start\n self._sync_databases()\n if self._optime_logger:\n self._optime_logger.write(oplog_start)\n log.info('first %s' % oplog_start)\n self._sync_oplog(oplog_start)", "def sync(self) -> None:\n for parameter in self.data_to_sync:\n assert hasattr(self, parameter), \\\n \"Parameter: %s does not exist in: %s\" % (parameter, self)\n self.publish(self.key_gen(parameter), getattr(self, parameter))", "def update_usage_queue(self, md5_hash):\n if md5_hash in self.usage_queue:\n self.remove_from_usage_queue(md5_hash)\n self.usage_queue.append(md5_hash)", "def sync(config, state, catalog):\n # Loop over selected streams in catalog\n for stream in catalog.get_selected_streams(state):\n LOGGER.info(\"Syncing stream:\" + stream.tap_stream_id)\n\n full_path = \"schemas/{}.json\".format(stream.tap_stream_id.lower())\n schema = utils.load_json(get_abs_path(full_path))\n singer.write_schema(\n stream_name=stream.tap_stream_id,\n schema=schema,\n key_properties=stream.key_properties,\n )\n\n get_token(config)\n interval = set_interval(lambda: get_token(config), 3500)\n get_report(stream.tap_stream_id, config, schema)\n interval.cancel()\n singer.write_state({\"last_updated_at\": str(datetime.now().isoformat()), \"stream\": stream.tap_stream_id})\n return", "def sync_info(self, sync_info):\n\n self._sync_info = sync_info" ]
[ "0.6097781", "0.6036069", "0.5823529", "0.5772382", "0.5762391", "0.57144344", "0.56363374", "0.55983853", "0.55737966", "0.55737966", "0.5540843", "0.55382794", "0.549456", "0.5484625", "0.5479431", "0.5431175", "0.5415981", "0.54098845", "0.5408363", "0.54003614", "0.5398192", "0.53756857", "0.53664684", "0.53662676", "0.53600293", "0.53515625", "0.5351473", "0.53448206", "0.5343723", "0.5336414" ]
0.76762164
0
Whether the DagFileProcessorManager finished.
def done(self) -> bool:
    return self._done
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def done(self):\n return self._is_done", "def _is_done(self):\n pass", "def done(self) -> bool:\n return pulumi.get(self, \"done\")", "def finished(self):\n return self._state == FINISHED_STATE", "def is_done(self):\n return self._done", "def has_finished():", "def isFinished(self):\n return False", "def finished(self):\n return False", "def is_done():\n return False", "def done(self):\n return self._info['status'] == 'DONE'", "def is_done(self) -> bool:\n is_done = self._process.poll() is not None\n\n return is_done", "def is_finished(self):\n if self.task_index + 1 >= self.image_count:\n return True\n return False", "def isFinish(self):\n return self.finish", "def done(self):\n return False", "def done(self) -> bool:", "def done(self):\n try:\n if self.doneCondition(self.currState): return True\n except:\n # Add to log\n print(\"WARNING: doneCondition looks to have error OR is not initialized\")\n\n return False", "def isDoneWithReactor(self):\n return True", "def isFinished():", "def isFinished():", "def isFinished():", "def is_finished(self):\n return self.controller.is_finished", "def is_detection_complete(self) -> bool:\n return self.__is_detection_completed", "def check_finish(self):\r\n return not self.proc.is_alive()", "def is_completed(self):\n self.logger.info(\"# dispatch completed: %s\", self.dispatch_completed)\n self.logger.info(\"@ num_queries={}, num_finished_jobs={}\".format(self.query_count, self.num_finished_jobs))\n for driver_id in sorted(self.dispatch_records.keys()):\n self.logger.info(\"driver_id={}, dispatch={}, reports={}\".format(driver_id, self.dispatch_records[driver_id], self.report_records[driver_id]))\n return self.dispatch_completed and (self.query_count == self.num_finished_jobs)", "def fileIsComplete(self):\n return True", "def is_done(self):\n\n return not self.thread.is_alive()", "def is_finished(self):\n\n return self.state != self.STATE_INIT and self.state != self.STATE_READY", "def is_done(self):\n return True if self.t >= self.max_ep_len else False", "def finished(self):\n if self._bt.root.status == Status.SUCCESS:\n return True\n if self._bt.root.status == Status.FAILURE:\n return True\n return False", "def finished(self):\n return get_num_finished(self.log_dir) == self.num_folds" ]
[ "0.717276", "0.7171391", "0.71180296", "0.7105147", "0.70777625", "0.70586365", "0.70552933", "0.70439535", "0.70309687", "0.7009928", "0.6952927", "0.69490445", "0.69233614", "0.6890246", "0.68833804", "0.6863327", "0.68570995", "0.6841842", "0.6841842", "0.6841842", "0.68373215", "0.68323725", "0.68236756", "0.6819452", "0.6810917", "0.6792186", "0.6788892", "0.6771622", "0.6771244", "0.676296" ]
0.72976714
0
Register signals that stop child processes.
def register_exit_signals(self):
    signal.signal(signal.SIGINT, self._exit_gracefully)
    signal.signal(signal.SIGTERM, self._exit_gracefully)
    # So that we ignore the debug dump signal, making it easier to send
    signal.signal(signal.SIGUSR2, signal.SIG_IGN)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def kill_child_processes(parent_pid, sig=signal.SIGTERM):\n try:\n parent = psutil.Process(parent_pid)\n except psutil.NoSuchProcess:\n return\n children = parent.children(recursive=True)\n for process in children:\n try:\n process.send_signal(sig)\n except psutil.NoSuchProcess:\n return", "def kill_all_processes(self, signal=signal.SIGINT) -> None:\n for task_name, sp in self.process_queue:\n sp.send_signal(signal)", "def stop_all(self, signal, frame):\n for event in self.event_list:\n event.set()\n for process in self.process_list:\n process.join()\n sys.exit()", "def setsignals():\n\tsignal.signal(signal.SIGINT, gracefulexit)\n\tsignal.signal(signal.SIGTERM, gracefulexit)\n\tsignal.signal(signal.SIGHUP, gracefulexit)", "def setup_termination():\n setpgrp()\n\n def terminate(signum, frame):\n logger.debug(\"SIGKILL received!\")\n logger.debug(\"%d, %s\", signum, frame)\n killpg(0, SIGKILL)\n\n signal(SIGINT, terminate)", "def register_signal_handler(self):\n signal.signal(signal.SIGINT, self.quit_gracefully)\n signal.signal(signal.SIGTERM, self.quit_gracefully)\n return", "def signal_handler(signum, frame):\n self.log.error(\"Received SIGTERM. Terminating subprocesses\")\n self.task_runner.terminate()\n self.handle_task_exit(128 + signum)", "def __sigint_handler(signal, frame):\n logging.debug(\"SIGINT or SIGTERM catched\")\n logging.debug(\"Raise t_stop_event\")\n t_stop_event.set() # Set stop flag to true for all launched threads\n logging.info(\"Stopping daemons...\")\n sleep(1)", "def capture_signals():\n # Both Linux and Windows signals\n signal.signal(signal.SIGINT, stop_server)\n signal.signal(signal.SIGTERM, stop_server)\n\n if platform.system() == \"Linux\": # Linux only\n signal.signal(signal.SIGQUIT, stop_server)\n signal.signal(signal.SIGHUP, stop_server)", "def _set_signal_handlers():\n\n def _handler(_signal, _frame):\n raise KeyboardInterrupt\n\n signal.signal(signal.SIGINT, _handler)\n signal.signal(signal.SIGTERM, _handler)", "def stop(self):\n os.killpg(os.getpgid(self.process.pid), signal.SIGTERM)", "def sigterm_caught(sig, frame):\n global child_pid\n print 'SIGTERM caught, killing wicd-monitor...'\n os.kill(child_pid, signal.SIGTERM)\n print 'Removing PID file...'\n if os.path.exists(wpath.pidfile):\n os.remove(wpath.pidfile)\n print 'Shutting down...'\n sys.exit(0)", "def setup_signals(self):\n if os.name == 'nt':\n return\n\n def shutdown_handler(signo):\n log.info('Shutting down on signal %d' % signo)\n self.shutdown_event.set()\n\n loop = asyncio.get_event_loop()\n for sig in [signal.SIGTERM, signal.SIGINT]:\n loop.add_signal_handler(sig, shutdown_handler, sig)", "def term_mp(sig_num, frame):\n pid = os.getpid()\n pgid = os.getpgid(os.getpid())\n logger.info(\"main proc {} exit, kill process group \"\n \"{}\".format(pid, pgid))\n os.killpg(pgid, signal.SIGKILL)", "def kill_all(name, sig=signal.SIGKILL):\n sig = int(sig)\n for proc in psutil.process_iter():\n if proc.name() == name:\n kill(proc.pid, sig)", "def stopProcesses(*args):\n _stopProcessSet(_running)", "def _on_parent_process_kill(self):", "def stop( self, parentPid ):\n if parentPid != os.getpid():\n self.logger.debug( 'Stop signal received but pids do not match' )\n return\n\n self.__stop()", "def signal_handler(signal, frame):\n output.write(\"SIGTERM received (launch_simulations talking)\\n\")\n p.kill(15)\n sys.exit(1)", "def hook_signals(self):\n signal.signal(signal.SIGTERM, self.quit)\n signal.signal(signal.SIGQUIT, self.quit)\n signal.signal(signal.SIGHUP, self.reload)", "def 
signal_handler(signal, frame):\n\n process_id = multiprocessing.current_process().name\n if process_id == 'child':\n return\n logger = logging.getlogger('signal_handler')\n logger.info('ctrl-c received.')\n logger.info('telling pipeline to shutdown')\n global pipeline\n pipeline.shutdown()", "def stop(self, signal):\n pass", "def stop(self, signal):\n pass", "def kill(self):\n self._stop_proc(signal.SIGKILL)", "def shutdown():\n os.kill(os.getpid(), signal.SIGTERM)", "def set_graceful_exit():\n\n for sig in [signal.SIGHUP,\n signal.SIGINT,\n signal.SIGQUIT,\n signal.SIGTERM]:\n signal.signal(sig, __exit_handler)", "def _set_signal_handler(self) -> None:\r\n loop = asyncio.get_running_loop()\r\n # get interupt signals supported by user's OS.\r\n signals = [getattr(signal, s) for s in (\r\n 'SIGBREAK', 'SIGINT', 'SIGTERM', 'SIGHUP') if hasattr(signal, s)]\r\n for s in signals:\r\n try:\r\n loop.add_signal_handler(\r\n s, lambda s=s: asyncio.create_task(self.shutdown(s)))\r\n except NotImplementedError:\r\n pass", "def stop(self, *args):\n if args[0] == 'all':\n for k, v in self.processers.items():\n if v:\n try:\n v.terminate()\n except:\n pass\n print 'Killed %s.' % k\n\n self.processers = dict.fromkeys(self.processers.keys())\n else:\n seq = args[0]\n try:\n self.processers['process%s' % seq].terminate()\n self.processers['process%s' % seq] = None\n print 'Killed process%s.' % seq\n except:\n print 'Have no process%s.' % seq", "def kill(name, signal=9, exact=False):\n for pid in find(name, exact):\n run(\"kill -s {0} {1}\".format(signal, pid))", "def kill_processes(self):\n for proc in self.processes:\n if proc['proc'].poll() is not None:\n proc['proc'].terminate()" ]
[ "0.6800868", "0.65543705", "0.65473795", "0.6545365", "0.6469888", "0.644261", "0.62947786", "0.6292127", "0.621205", "0.6175318", "0.61510694", "0.6035429", "0.59871596", "0.59867096", "0.5972011", "0.5968891", "0.59670824", "0.595869", "0.5954736", "0.5928941", "0.5890723", "0.58811235", "0.58811235", "0.5880394", "0.5870219", "0.58531237", "0.58170027", "0.5794178", "0.5766482", "0.5755598" ]
0.7050701
0
Scan at fix internal DAGs which are no longer present in files.
def _scan_stale_dags(self):
    now = timezone.utcnow()
    elapsed_time_since_refresh = (now - self.last_deactivate_stale_dags_time).total_seconds()
    if elapsed_time_since_refresh > self.parsing_cleanup_interval:
        last_parsed = {
            fp: self.get_last_finish_time(fp) for fp in self.file_paths if self.get_last_finish_time(fp)
        }
        DagFileProcessorManager.deactivate_stale_dags(
            last_parsed=last_parsed,
            dag_directory=self.get_dag_directory(),
            stale_dag_threshold=self.stale_dag_threshold,
        )
        self.last_deactivate_stale_dags_time = timezone.utcnow()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cleanup_in_progress_bugs_without_patches(self):\n raise NotImplementedError(\"not yet done\")", "def clean_condor(c):\n for fname in glob.glob(os.path.join(ROOT_DIR, '*.dag')):\n _delete_pattern(fname + '.*')\n for fname in glob.glob(os.path.join(ROOT_DIR, '*.sub')):\n temps = []\n with open(fname, 'r') as f:\n for line in f.readlines():\n for w in ('log', 'error', 'output'):\n if line.startswith(w):\n temp = line.split('=')[-1].strip()\n if os.path.isfile(temp):\n temps.append(temp)\n if temps:\n print(f'Found submit script {fname}:')\n for temp in temps:\n _delete_file(temp, indent=' ')\n _delete_file(f'{ROOT_DIR}/docker_stderror')\n _delete_file(f'{ROOT_DIR}/parsetab.py')", "def invalidate_for_files(self):\r\n return []", "def invalidate_for_files(self):\n return []", "def clean_stale_issues():\n from security_monkey.common.audit_issue_cleanup import clean_stale_issues\n clean_stale_issues()", "def remove_stale_files(self) -> None:\n\n for db in self.dbnodes:\n db.remove_stale_dbnode_files()", "def _refresh_dag_dir(self) -> bool:\n now = timezone.utcnow()\n elapsed_time_since_refresh = (now - self.last_dag_dir_refresh_time).total_seconds()\n if elapsed_time_since_refresh > self.dag_dir_list_interval:\n # Build up a list of Python files that could contain DAGs\n self.log.info(\"Searching for files in %s\", self._dag_directory)\n self._file_paths = list_py_file_paths(self._dag_directory)\n self.last_dag_dir_refresh_time = now\n self.log.info(\"There are %s files in %s\", len(self._file_paths), self._dag_directory)\n self.set_file_paths(self._file_paths)\n\n try:\n self.log.debug(\"Removing old import errors\")\n DagFileProcessorManager.clear_nonexistent_import_errors(file_paths=self._file_paths)\n except Exception:\n self.log.exception(\"Error removing old import errors\")\n\n def _iter_dag_filelocs(fileloc: str) -> Iterator[str]:\n \"\"\"Get \"full\" paths to DAGs if inside ZIP files.\n\n This is the format used by the remove/delete functions.\n \"\"\"\n if fileloc.endswith(\".py\") or not zipfile.is_zipfile(fileloc):\n yield fileloc\n return\n try:\n with zipfile.ZipFile(fileloc) as z:\n for info in z.infolist():\n if might_contain_dag(info.filename, True, z):\n yield os.path.join(fileloc, info.filename)\n except zipfile.BadZipFile:\n self.log.exception(\"There was an error accessing ZIP file %s %s\", fileloc)\n\n dag_filelocs = {full_loc for path in self._file_paths for full_loc in _iter_dag_filelocs(path)}\n\n from airflow.models.dagcode import DagCode\n\n SerializedDagModel.remove_deleted_dags(\n alive_dag_filelocs=dag_filelocs,\n processor_subdir=self.get_dag_directory(),\n )\n DagModel.deactivate_deleted_dags(\n dag_filelocs,\n processor_subdir=self.get_dag_directory(),\n )\n DagCode.remove_deleted_code(\n dag_filelocs,\n processor_subdir=self.get_dag_directory(),\n )\n\n return True\n return False", "def files_unchanged(self):\n\n passed = []\n failed = []\n ignored = []\n fixed = []\n could_fix = False\n\n # Check that we have the minimum required config\n required_pipeline_config = {\"manifest.name\", \"manifest.description\", \"manifest.author\"}\n missing_pipeline_config = required_pipeline_config.difference(self.nf_config)\n if missing_pipeline_config:\n return {\"ignored\": [f\"Required pipeline config not found - {missing_pipeline_config}\"]}\n try:\n prefix, short_name = self.nf_config[\"manifest.name\"].strip(\"\\\"'\").split(\"/\")\n except ValueError:\n log.warning(\n \"Expected manifest.name to be in the format '<repo>/<pipeline>'. 
Will assume it is <pipeline> and default to repo 'nf-core'\"\n )\n short_name = self.nf_config[\"manifest.name\"].strip(\"\\\"'\")\n prefix = \"nf-core\"\n\n # NB: Should all be files, not directories\n # List of lists. Passes if any of the files in the sublist are found.\n files_exact = [\n [\".gitattributes\"],\n [\".prettierrc.yml\"],\n [\"CODE_OF_CONDUCT.md\"],\n [\"LICENSE\", \"LICENSE.md\", \"LICENCE\", \"LICENCE.md\"], # NB: British / American spelling\n [os.path.join(\".github\", \".dockstore.yml\")],\n [os.path.join(\".github\", \"CONTRIBUTING.md\")],\n [os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"bug_report.yml\")],\n [os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"config.yml\")],\n [os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"feature_request.yml\")],\n [os.path.join(\".github\", \"PULL_REQUEST_TEMPLATE.md\")],\n [os.path.join(\".github\", \"workflows\", \"branch.yml\")],\n [os.path.join(\".github\", \"workflows\", \"linting_comment.yml\")],\n [os.path.join(\".github\", \"workflows\", \"linting.yml\")],\n [os.path.join(\"assets\", \"email_template.html\")],\n [os.path.join(\"assets\", \"email_template.txt\")],\n [os.path.join(\"assets\", \"sendmail_template.txt\")],\n [os.path.join(\"assets\", f\"nf-core-{short_name}_logo_light.png\")],\n [os.path.join(\"docs\", \"images\", f\"nf-core-{short_name}_logo_light.png\")],\n [os.path.join(\"docs\", \"images\", f\"nf-core-{short_name}_logo_dark.png\")],\n [os.path.join(\"docs\", \"README.md\")],\n [os.path.join(\"lib\", \"nfcore_external_java_deps.jar\")],\n [os.path.join(\"lib\", \"NfcoreTemplate.groovy\")],\n ]\n files_partial = [\n [\".gitignore\", \".prettierignore\", \"pyproject.toml\"],\n ]\n\n # Only show error messages from pipeline creation\n logging.getLogger(\"nf_core.create\").setLevel(logging.ERROR)\n\n # Generate a new pipeline with nf-core create that we can compare to\n tmp_dir = tempfile.mkdtemp()\n\n # Create a template.yaml file for the pipeline creation\n template_yaml = {\n \"name\": short_name,\n \"description\": self.nf_config[\"manifest.description\"].strip(\"\\\"'\"),\n \"author\": self.nf_config[\"manifest.author\"].strip(\"\\\"'\"),\n \"prefix\": prefix,\n }\n\n template_yaml_path = os.path.join(tmp_dir, \"template.yaml\")\n with open(template_yaml_path, \"w\") as fh:\n yaml.dump(template_yaml, fh, default_flow_style=False)\n\n test_pipeline_dir = os.path.join(tmp_dir, f\"{prefix}-{short_name}\")\n create_obj = nf_core.create.PipelineCreate(\n None, None, None, no_git=True, outdir=test_pipeline_dir, template_yaml_path=template_yaml_path\n )\n create_obj.init_pipeline()\n\n # Helper functions for file paths\n def _pf(file_path):\n \"\"\"Helper function - get file path for pipeline file\"\"\"\n return os.path.join(self.wf_path, file_path)\n\n def _tf(file_path):\n \"\"\"Helper function - get file path for template file\"\"\"\n return os.path.join(test_pipeline_dir, file_path)\n\n # Files that must be completely unchanged from template\n for files in files_exact:\n # Ignore if file specified in linting config\n ignore_files = self.lint_config.get(\"files_unchanged\", [])\n if any([f in ignore_files for f in files]):\n ignored.append(f\"File ignored due to lint config: {self._wrap_quotes(files)}\")\n\n # Ignore if we can't find the file\n elif not any([os.path.isfile(_pf(f)) for f in files]):\n ignored.append(f\"File does not exist: {self._wrap_quotes(files)}\")\n\n # Check that the file has an identical match\n else:\n for f in files:\n try:\n if filecmp.cmp(_pf(f), _tf(f), shallow=True):\n 
passed.append(f\"`{f}` matches the template\")\n else:\n if \"files_unchanged\" in self.fix:\n # Try to fix the problem by overwriting the pipeline file\n shutil.copy(_tf(f), _pf(f))\n passed.append(f\"`{f}` matches the template\")\n fixed.append(f\"`{f}` overwritten with template file\")\n else:\n failed.append(f\"`{f}` does not match the template\")\n could_fix = True\n except FileNotFoundError:\n pass\n\n # Files that can be added to, but that must contain the template contents\n for files in files_partial:\n # Ignore if file specified in linting config\n ignore_files = self.lint_config.get(\"files_unchanged\", [])\n if any([f in ignore_files for f in files]):\n ignored.append(f\"File ignored due to lint config: {self._wrap_quotes(files)}\")\n\n # Ignore if we can't find the file\n elif not any([os.path.isfile(_pf(f)) for f in files]):\n ignored.append(f\"File does not exist: {self._wrap_quotes(files)}\")\n\n # Check that the file contains the template file contents\n else:\n for f in files:\n try:\n with open(_pf(f), \"r\") as fh:\n pipeline_file = fh.read()\n with open(_tf(f), \"r\") as fh:\n template_file = fh.read()\n if template_file in pipeline_file:\n passed.append(f\"`{f}` matches the template\")\n else:\n if \"files_unchanged\" in self.fix:\n # Try to fix the problem by overwriting the pipeline file\n with open(_tf(f), \"r\") as fh:\n template_file = fh.read()\n with open(_pf(f), \"w\") as fh:\n fh.write(template_file)\n passed.append(f\"`{f}` matches the template\")\n fixed.append(f\"`{f}` overwritten with template file\")\n else:\n failed.append(f\"`{f}` does not match the template\")\n could_fix = True\n except FileNotFoundError:\n pass\n\n # cleaning up temporary dir\n shutil.rmtree(tmp_dir)\n\n return {\"passed\": passed, \"failed\": failed, \"ignored\": ignored, \"fixed\": fixed, \"could_fix\": could_fix}", "def remove_events(self):\n tofiltercalibfnames1, tofiltercalibfnames2 = split_into_telescopes(self.tofiltercalibfnames)\n\n nameiter = tqdm(self.superstarmcolfnames)\n for ssmcolfname in nameiter:\n nameiter.set_description(f\"Processing {path.basename(ssmcolfname)}\")\n currentrun = get_run(ssmcolfname)\n ssmcoldf = pd.read_csv(ssmcolfname, index_col=False)\n # newfname1 = self.single_remove_events(ssmcoldf, ssmcolfname, tofiltercalibfnames1, self.m1sscols)\n # newfname2 = self.single_remove_events(ssmcoldf, ssmcolfname, tofiltercalibfnames2, self.m2sscols)\n # if self.assert_filtermask_and_dfs(filtermask1, filtermask2, tofiltercalibdf1, tofiltercalibdf2, ssmcoldf) is False:\n # self._move_processed_files(self._handle_fnames_for_merging(tofiltercalibfnames1, currentrun), self.bugdir)\n # self._move_processed_files(self._handle_fnames_for_merging(tofiltercalibfnames2, currentrun), self.bugdir)\n # os.remove(newfname1)\n # os.remove(newfname2)\n # continue\n tofiltercalibdf1, oldname1 = self._merge_subruns(tofiltercalibfnames1,\n currentrun, getoldname=True)\n tofiltercalibdf2, oldname2 = self._merge_subruns(tofiltercalibfnames2,\n currentrun, getoldname=True)\n if len(tofiltercalibdf1) == 0:\n logger.warning(f\"calibfiles already processed, skipping {path.basename(ssmcolfname)} current run: {currentrun} \\n\")\n continue\n\n filtermask1 = self.get_filtermask(tofiltercalibdf1.iloc[:, self.mcolidx], ssmcoldf[self.m1sscols])\n filtermask2 = self.get_filtermask(tofiltercalibdf2.iloc[:, self.mcolidx], ssmcoldf[self.m2sscols])\n if self.invert is False:\n filtermask1, tofiltercalibdf1 = self.fix_idiosyncracies(filtermask1, tofiltercalibdf1, ssmcoldf[self.m1sscols])\n 
filtermask2, tofiltercalibdf2 = self.fix_idiosyncracies(filtermask2, tofiltercalibdf2, ssmcoldf[self.m2sscols])\n\n if self.assert_filtermask_and_dfs(filtermask1, filtermask2, tofiltercalibdf1, tofiltercalibdf2, ssmcoldf) is False:\n self._move_processed_files(self._handle_fnames_for_merging(tofiltercalibfnames1, currentrun), self.bugdir)\n self._move_processed_files(self._handle_fnames_for_merging(tofiltercalibfnames2, currentrun), self.bugdir)\n continue\n\n newfname1 = self._get_newfilename(oldname=oldname1)\n newfname2 = self._get_newfilename(oldname=oldname2)\n\n nameiter.set_description(f\"Saving {path.basename(newfname1)}\")\n tofiltercalibdf1.iloc[filtermask1].to_csv(newfname1, header=False, index=False)\n nameiter.set_description(f\"Saving {path.basename(newfname2)}\")\n tofiltercalibdf2.iloc[filtermask2].to_csv(newfname2, header=False, index=False)\n self.outfilenames.extend([newfname1, newfname2])\n # self._move_processed_files(self._handle_fnames_for_merging(tofiltercalibfnames1, currentrun))\n # self._move_processed_files(self._handle_fnames_for_merging(tofiltercalibfnames2, currentrun))", "def deadpathck(banana_file, type_table, emitter=emit.PrintEmitter()):\n # Check that first argument is a banana file. Mainly\n # an excuse to remove the F401 warning.\n if not isinstance(banana_file, ast.BananaFile):\n raise Exception(\"Expected BananaFile as first argument.\")\n\n # Look first for all branch that are \"dead\"\n connections = banana_file.connections # type: ast.Connection\n\n # If there are no connections everything is considered\n # as dead.\n if connections is None:\n class EmptyConnections(object):\n connections = []\n connections = EmptyConnections()\n\n # Collect the nodes and connect them.\n dag_nodes = {}\n # Create all the nodes\n for ident in banana_file.components.keys():\n dag_nodes[ident] = dag.DagNode(type_table.get_type(ident))\n # Connect them\n for ident_from, ident_to in connections.connections:\n dag_from = dag_nodes[ident_from]\n dag_to = dag_nodes[ident_to]\n dag_from.children.append(dag_to)\n dag_to.parents.append(dag_from)\n\n # Start from every sources and for each, check if the path is dead\n for node in dag_nodes.values():\n if isinstance(node.typec, type_util.Source):\n node.visit()\n\n # We can now remove all the components that are \"dead\"\n # from the list of connections\n for ident, node in dag_nodes.iteritems():\n if not node.is_alive():\n emitter.emit_warning(\n ident.span,\n \"Dead code found, this component is not in a path \"\n \"starting from a 'Source' and ending with a 'Sink'.\"\n )\n banana_file.components.pop(ident)\n connections.connections = filter(\n lambda edge: edge[0] != ident and edge[1] != ident,\n connections.connections\n )\n\n # TODO(Joan): We could also remove them from the statements.\n # TODO(Joan): But for this we need a dependency graph between\n # TODO(Joan): statements to make sure we don't break the code.", "def fix(self):\n print 'Can\\'t be auto fixed, please select to check and fix it manually.'\n # pm.delete(self.errorNodes)", "def _clean_files(self):\n if self.delfiles & 1:\n ProcUtils.remove(self.okm)\n if self.delfiles & 2:\n ProcUtils.remove(self.hkm)\n if self.delfiles & 4:\n ProcUtils.remove(self.qkm)\n if self.delfiles & 8:\n ProcUtils.remove(self.obc)\n\n if self.log is False:\n ProcUtils.remove(self.pcf_file)\n base = os.path.basename(self.okm)\n ProcUtils.remove(os.path.join(self.dirs['run'],\n '.'.join(['LogReport', base])))\n ProcUtils.remove(os.path.join(self.dirs['run'],\n '.'.join(['LogStatus', 
base])))\n ProcUtils.remove(os.path.join(self.dirs['run'],\n '.'.join(['LogUser', base])))", "def _purge_stale_checkpoints(self):\n if len(self._checkpoint_files) > self.max_checkpoints:\n purge_files = self._checkpoint_files[: -self.max_checkpoints]\n self._checkpoint_files = self._checkpoint_files[-self.max_checkpoints:]\n for chk in purge_files:\n silent_try(chk.purge_values)", "def listBadRefScripts(self):\n if not self.log: return\n ids = []\n for record in self.records:\n if record.name != 'SCPT': continue\n rnam = record.rnam\n if rnam and rnam.data == chr(255)*4:\n ids.append(record.getId())\n if ids:\n self.log.setHeader(_('Detached Global Scripts'))\n for id in sorted(ids,key=string.lower):\n self.log(id)", "def do_maint (self):\n self.log.info (\"cleanup\")\n current = glob.glob (os.path.join (self.infodir, self.infomask))\n removed = set(self.infomap) - set(current)\n for fn in removed:\n self.log.debug (\"forgetting file %s\", fn)\n del self.infomap[fn]\n self.log.info (\"current: %i, removed: %i\", len(current), len(removed))\n self.maint_timer = threading.Timer (self.maint_period, self.do_maint)\n self.maint_timer.start()", "def scan_root(str_ri, lst_rx, lst_rxe, db):\n print(f\"scan_root(); Scanning: {str_ri, lst_rx, lst_rxe}\")\n # get db info on this dir\n dic_db = dict() # dic by ffn of files known to the db\n str_sql = f\"SELECT * FROM files where filename like '{str_ri}%'\"\n for row in db.execute(str_sql):\n dic_db[row[0]] = row\n # Remove files that no longer exist\n lst_del_this = list()\n for str_ffn_db in dic_db.keys():\n if not os.path.isfile(str_ffn_db):\n lst_del_this.append(str_ffn_db)\n str_ffn_db__sql = str_ffn_db.replace(\"'\", \"''\")\n str_sql = f\"DELETE FROM files WHERE filename='{str_ffn_db__sql}';\"\n db.execute(str_sql)\n db.commit()\n for itm in lst_del_this: # can't change iterable from inside loop\n del dic_db[itm]\n # Walk the root-dir\n num_cntfil = 0\n for root, dirs, files in os.walk(str_ri):\n for str_fn in files:\n # if str_fn.lower().endswith('.jpg'):\n # print(str_fn)\n num_cntfil += 1\n if not any([str_fn.endswith(e) for e in lst_rxe]):\n str_ffn = os.path.join(root, str_fn)\n if not any([str_ffn.startswith(x) for x in lst_rx]): # if the file is not excluded\n if str_ffn in dic_db.keys(): # db knows this file\n obj_bdg = dic_db[str_ffn]\n tim, siz = timeandsize(str_ffn)\n if tim == dic_db[str_ffn][1] and siz == dic_db[str_ffn][2]:\n pass # print(f\" - skipping known file: {str_ffn} == {dic_db[str_ffn]}\") #\n else:\n ## print(f\"WTF: tim? {tim == dic_db[str_ffn][1]} siz? 
{siz == dic_db[str_ffn][2]} @ ffn: {str_ffn}\")\n # time or date have changed - so re-scanning file, and update DB.\n str_sql = f\"DELETE FROM files WHERE filename='{str_ffn}';\"\n db.execute(str_sql)\n db.commit()\n add_file2db(str_ffn, db)\n else: # db don't know this file - add it.\n add_file2db(str_ffn, db)\n if num_cntfil % 1000000 == 0:\n print(f\"Count: {num_cntfil}: {str_ffn}\")", "def collect_garbage(results, task, visited_nodes, targets, dag):\n for ancestor in dag.predecessors(task):\n is_obsolete = all(\n successor in visited_nodes for successor in dag.successors(ancestor)\n )\n\n if is_obsolete and ancestor not in targets:\n del results[ancestor]\n\n return results", "def report_unused_cycle_suppressions(self, reporter):\n for module in self.get_modules():\n for dep in module.get_dependencies():\n if not dep.suppression_used:\n reporter.cyclic_issue(\"unused cycle suppression: {0} -> {1}\".format(module.get_name()[7:], dep.get_other_module().get_name()[7:]))", "def __purge_old_files(self):\n\n chkpts = self.checkpointer.sorted_checkpoints()\n p_chkpts = []\n e_chkpts = []\n for c in chkpts:\n if c.startswith(self.checkpointer.prefix + CheckpointingCallback.PERIODIC_PREFIX):\n p_chkpts.append(c)\n\n if c.startswith(self.checkpointer.prefix + CheckpointingCallback.EPOCH_PREFIX):\n e_chkpts.append(c)\n\n # Delete periodic checkpoints\n if self.max_files is not None and len(p_chkpts) > self.max_files:\n for c in p_chkpts[self.max_files:]:\n log.debug(\"CheckpointingCallback deleting {}\".format(c))\n self.checkpointer.delete(c)\n\n # Delete older epochs\n if self.max_epochs is not None and len(e_chkpts) > self.max_epochs:\n for c in e_chkpts[self.max_epochs:]:\n log.debug(\"CheckpointingCallback deleting (epoch) {}\".format(c))\n self.checkpointer.delete(c)", "def flake8_scan_file(commit_sha, owner, repo, parent_sha=None):\n if parent_sha is None:\n parent_sha = get_commit_parent(commit_sha, owner, repo)\n diff_url = GIT_COMPARE_URL.format(base=parent_sha,\n head=commit_sha,\n owner=owner,\n repo=repo,\n host=host_api)\n diff_info = get(diff_url, auth=auth).json()\n diff_content = get(diff_url,\n auth=auth,\n headers={\"Accept\": \"application/vnd.github.v3.diff\"}\n ).content.decode('utf8')\n patch_set = PatchSet(diff_content)\n comments_per_file = {}\n for file in diff_info['files']:\n content = get(file['contents_url'], auth=auth).json()\n file_content = get(content['download_url']).content\n with open(\"flake8_tmp_file.py\", 'wb') as test_file:\n test_file.write(file_content)\n style_guide = flake8.get_style_guide(ignore=['E24', 'W503'])\n style_guide.input_file('./flake8_tmp_file.py', )\n results = style_guide._application.file_checker_manager.checkers[\n 0].results\n comments_per_line = {}\n for code, line_n, offset, text, src in results:\n if changed_in_diff(get_file_by_name(patch_set,\n file['filename']), line_n):\n comments = comments_per_line.get(line_n, [])\n comments.append((file['filename'], line_n, offset, code, text))\n comments_per_line[line_n] = comments\n comments_per_file[file['filename']] = comments_per_line\n return comments_per_file", "def find_todo():\n remove_pyc_files()\n local('grep -ir \"TODO\" *')", "def clear_file_level_issues(self, tests_for_rerun, out_file):\n if tests_for_rerun is None:\n return 0\n\n cleared_file_level_issues = 0\n # Find the unique set of files that are covered by the given tests\n # that are to be rerun. 
We derive the files that are eligible for\n # having their markers cleared, because we support running in a mode\n # where only flaky tests are eligible for rerun. If the file-level\n # issue occurred in a file that was not marked as flaky, then we\n # shouldn't be clearing the event here.\n basename_set = set()\n for test_file_relpath in tests_for_rerun:\n basename_set.add(os.path.basename(test_file_relpath))\n\n # Find all the keys for file-level events that are considered\n # test issues.\n file_level_issues = [(key, event)\n for key, event in self.result_events.items()\n if ResultsFormatter._is_file_level_issue(\n key, event)\n and event.get(\"status\", \"\") in\n EventBuilder.TESTRUN_ERROR_STATUS_VALUES]\n\n # Now remove any file-level error for the given test base name.\n for key, event in file_level_issues:\n # If the given file base name is in the rerun set, then we\n # clear that entry from the result set.\n if os.path.basename(key) in basename_set:\n self.result_events.pop(key, None)\n cleared_file_level_issues += 1\n if out_file is not None:\n out_file.write(\n \"clearing file-level issue for file {} \"\n \"(issue type: {})\"\n .format(key, event.get(\"status\", \"<unset-status>\")))\n\n return cleared_file_level_issues", "def removeNonKeepFiles(self):\n\n if not self.deleteNonKeepFiles:\n self.logger.info('Deletion of non keep files (to save space) has not been requested.')\n return\n\n self.logger.info('Deletion of non-keep files is requested. Scanning...')\n \n toDelete = []\n for root, dirs, files in os.walk(self.runPath):\n toDelete.extend([os.path.join(root, f) for f in files if os.path.join(root, f) not in self.keepFiles.keys()])\n # os.walk ignores links, explicitly get them\n toDelete.extend([os.path.join(root, f) for f in os.listdir(root) if os.path.islink(os.path.join(root,f)) and os.path.join(root,f) not in self.keepFiles.keys()])\n\n for thing in os.listdir(self.runPath):\n self.logger.debug('removeNonKeepFiles::before delete: %s' % str(thing))\n \n for thing in toDelete:\n if thing in self.neverCopyAndNeverDelete:\n self.logger.debug('removeNonKeepFiles::Not deleting neverCopyAndNeverDelete file: \\n%s' % thing)\n continue\n try:\n os.remove(thing)\n self.logger.debug('Deleted: %s' % thing)\n except:\n message = 'Unable to delete non-keepfile: %s\\n' % thing\n message += exc2string2()\n self.logger.error(message)", "def _cleanUp(self):\r\n limit = datetime.now() - timedelta(seconds=self._timeout)\r\n\r\n toClean = [msg for msg in self._incompleteMsgs if msg.older(limit)]\r\n\r\n if toClean:\r\n for msg in toClean:\r\n self._incompleteMsgs.remove(msg)\r\n\r\n log.msg('{0} incomplete messages have been dropped '\r\n 'from assembler.'.format(len(toClean)))\r\n\r\n toClean = [uri for uri, (_, timestamp) in self._binaries.iteritems()\r\n if timestamp < limit]\r\n\r\n if toClean:\r\n for uri in toClean:\r\n del self._binaries[uri]\r\n\r\n log.msg('{0} unused binaries have been dropped '\r\n 'from assembler.'.format(len(toClean)))", "def fix(self):\n\n pm.delete(self.errorNodes)\n\n self.run()", "def resolve(self): # HashMap.resolve\n prevCount = self.allFiles.count_deleted()\n\n # no need to resolve uniques, so remove them from the HashMap\n deleteList=[]\n for hashval, list in self.contentHash.iteritems():\n if len(list) == 1:\n deleteList.append(hashval)\n for e in deleteList:\n del self.contentHash[e]\n\n # delete the directories first, in order of\n # increasing depth\n if verbose:\n print '# checking candidates from depth ' + str(self.minDepth) + ' through ' + 
str(self.maxDepth)\n for currentDepth in xrange(self.minDepth-1,self.maxDepth+1):\n for hashval, list in self.contentHash.iteritems():\n example = list[0]\n if isinstance(example, DirObj):\n winner, losers = resolve_candidates(list, currentDepth)\n if losers != None:\n for loser in losers:\n if not loser.deleted:\n if verbose:\n print '# dir \"' + loser.pathname + '\" covered by \"' + winner.pathname + '\"'\n self.delete(loser)\n loser.winner = winner\n self.prune()\n\n for hashval, list in self.contentHash.iteritems():\n example = list[0] \n if isinstance(example, FileObj):\n winner, losers = resolve_candidates(list)\n for loser in losers:\n if not loser.deleted:\n if verbose:\n print '# file \"' + loser.pathname + '\" covered by \"' + winner.pathname + '\"'\n self.delete(loser)\n loser.winner = winner\n\n return self.allFiles.count_deleted() - prevCount", "def postCompile(root, excluded):\n # If you need to delete specific -parsed but not wanted- files, do it here\n pass", "def _prepare_manual_resolve(self):\n # Files that have been deleted between branch and cherry-pick will not have\n # their skip-worktree bit set so set it manually for those files to avoid\n # git status incorrectly listing them as unstaged deletes.\n repo_status = self._run_git_command(\n ['-c', 'core.quotePath=false', 'status', '--porcelain']).splitlines()\n extra_files = [f[3:] for f in repo_status if f[:2] == ' D']\n if extra_files:\n self._run_git_command_with_stdin(\n ['update-index', '--skip-worktree', '--stdin'],\n stdin='\\n'.join(extra_files) + '\\n')", "def prune(c):\n with conn.cd(utils.join(SALT_DEPLOY_PATH, utils.DEPLOY_RELEASES_DIR)):\n releases = [\n d.replace(\"./\", \"\").strip()\n for d in conn.run(\"find . -maxdepth 1 -mindepth 1 -type d\", pty=True)\n .stdout.strip()\n .split(\"\\n\")\n ]\n releases.sort()\n\n diff = len(releases) - int(SALT_KEEP_RELEASES)\n print(\n f\"Found {len(releases)} current releases; set to keep {SALT_KEEP_RELEASES}\"\n )\n if diff > 0:\n to_delete = releases[:diff]\n print(f\"Cleaning up {len(to_delete)} old release(s)\")\n conn.run(f\"rm -rf {' '.join(to_delete)}\")\n else:\n print(\"Nothing to do\")", "def _CheckNoIn(input_api, output_api):\n results = []\n for f in input_api.AffectedFiles(include_deletes=False):\n if f.LocalPath().endswith('.in'):\n results.append(output_api.PresubmitError(\n 'Remove %s since corpus tests should not use .in files' % f.LocalPath()))\n return results" ]
[ "0.60389453", "0.55653155", "0.55171776", "0.5486258", "0.5453722", "0.54526615", "0.543999", "0.54291993", "0.5398894", "0.5323134", "0.531323", "0.52942467", "0.5255985", "0.5253049", "0.523504", "0.5208834", "0.5197419", "0.51893747", "0.51579833", "0.51465344", "0.5143649", "0.5138784", "0.5124691", "0.5121995", "0.5089027", "0.5084383", "0.5057971", "0.504648", "0.50458556", "0.5040974" ]
0.5682571
1
Refresh file paths from dag dir if we haven't done it for too long.
def _refresh_dag_dir(self) -> bool: now = timezone.utcnow() elapsed_time_since_refresh = (now - self.last_dag_dir_refresh_time).total_seconds() if elapsed_time_since_refresh > self.dag_dir_list_interval: # Build up a list of Python files that could contain DAGs self.log.info("Searching for files in %s", self._dag_directory) self._file_paths = list_py_file_paths(self._dag_directory) self.last_dag_dir_refresh_time = now self.log.info("There are %s files in %s", len(self._file_paths), self._dag_directory) self.set_file_paths(self._file_paths) try: self.log.debug("Removing old import errors") DagFileProcessorManager.clear_nonexistent_import_errors(file_paths=self._file_paths) except Exception: self.log.exception("Error removing old import errors") def _iter_dag_filelocs(fileloc: str) -> Iterator[str]: """Get "full" paths to DAGs if inside ZIP files. This is the format used by the remove/delete functions. """ if fileloc.endswith(".py") or not zipfile.is_zipfile(fileloc): yield fileloc return try: with zipfile.ZipFile(fileloc) as z: for info in z.infolist(): if might_contain_dag(info.filename, True, z): yield os.path.join(fileloc, info.filename) except zipfile.BadZipFile: self.log.exception("There was an error accessing ZIP file %s %s", fileloc) dag_filelocs = {full_loc for path in self._file_paths for full_loc in _iter_dag_filelocs(path)} from airflow.models.dagcode import DagCode SerializedDagModel.remove_deleted_dags( alive_dag_filelocs=dag_filelocs, processor_subdir=self.get_dag_directory(), ) DagModel.deactivate_deleted_dags( dag_filelocs, processor_subdir=self.get_dag_directory(), ) DagCode.remove_deleted_code( dag_filelocs, processor_subdir=self.get_dag_directory(), ) return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def refresh_all(self) -> None:\n self._update_thread.force_refresh_folder(self.feed_cache)", "def _scan_stale_dags(self):\n now = timezone.utcnow()\n elapsed_time_since_refresh = (now - self.last_deactivate_stale_dags_time).total_seconds()\n if elapsed_time_since_refresh > self.parsing_cleanup_interval:\n last_parsed = {\n fp: self.get_last_finish_time(fp) for fp in self.file_paths if self.get_last_finish_time(fp)\n }\n DagFileProcessorManager.deactivate_stale_dags(\n last_parsed=last_parsed,\n dag_directory=self.get_dag_directory(),\n stale_dag_threshold=self.stale_dag_threshold,\n )\n self.last_deactivate_stale_dags_time = timezone.utcnow()", "async def start_watching_roots(self):\n db.clear_visits(self.db_conn)\n for root in self.config.roots:\n await self.watch_tree(root)\n\n for path in db.get_unvisited_files(self.db_conn):\n print(path)\n await self.process_change(path, None)", "def remove_stale_files(self) -> None:\n\n for db in self.dbnodes:\n db.remove_stale_dbnode_files()", "def refresh(self):\n self.config.read(self.filename)\n self.loadRecentFiles()", "def background_import_dir_and_watch(bv):\n background_import_dir(bv, watch=True)", "async def managePathsToWatchQueue(self) :\n\n while self.continueWatchingFS :\n addPath, aPathToWatch, theWatch = await self.pathsToWatchQueue.get()\n\n if addPath :\n for aPath in self.get_directories_recursive(Path(aPathToWatch)) :\n try :\n self.numWatches = self.numWatches + 1\n self.inotify.add_watch(aPath, self.wrMask)\n self.logger.debug(f'INIT: watching {aPath}')\n except PermissionError as err :\n pass\n except Exception as err:\n print(f\"Exception while trying to watch: [{aPath}]\")\n traceback.print_exc(err)\n # we can't watch this path just yet...\n # ... schedule its parent and try again...\n await self.watchAPath(aPath.parent)\n else :\n # according to the documentation.... the corresponding\n # Mask.IGNORE event will automatically remove this watch.\n #self.inotify.rm_watch(theWatch)\n self.numUnWatches = self.numUnWatches + 1\n self.logger.debug(f'INIT: unWatching {aPathToWatch}')\n if aPathToWatch in self.rootPaths :\n self.logger.debug(f'INIT: found root path... rewatching it {aPathToWatch}')\n await self.watchAPath(aPathToWatch)\n self.pathsToWatchQueue.task_done()", "def _LoadNewPaths(self):\n paths = sorted(path\n for path in io_wrapper.ListDirectoryAbsolute(self._directory)\n if self._path_filter(path))\n for path in paths:\n if path not in self._paths:\n logger.info('New path detected: %s.' 
% path)\n self._paths[path] = _EventPathLoader(path, self._loader_factory)", "def refresh(self):\n self.update_from_file()\n self.update_from_env()", "def _post_update_paths(self, **kwargs):\n\n files_updated = kwargs.get('files_updated', list())\n if not files_updated:\n return\n\n maya_utils.reload_textures(files_updated)\n\n # Dependencies are already reloaded during update paths process\n # maya_utils.reload_dependencies(files_updated)", "def autoBuildTick (self, event = None):\r\n for pathname, oldmtime in self.autobuildfiles.iteritems():\r\n newmtime = os.stat(pathname).st_mtime\r\n if newmtime != oldmtime:\r\n #print \"Auto rebuild triggered by: \", pathname\r\n self.autobuildfiles[pathname] = newmtime\r\n self.rebuild()\r\n break", "def _update_files():\n configuration_settings = get_configuration()\n\n # Need to find all of the files that are stored in the input_files directories in order to start building the\n # reports that will be used to generate the static log files.\n for input_path in configuration_settings.processing.inputs:\n search_path = pathlib.Path(input_path)\n\n # Currently going to make the assumption that everyone is using the path naming convention that I'm dictating\n # which is YYYY/MM/DD/file.ext\n for file_component in search_path.glob('*/*/*/*'):\n # Store all of the files into a dictionary containing the keys and a list of the files that are associated\n # with that day\n updaters.update_files(search_path, file_component)", "def update(self):\n if os.path.isdir(self.full_path):\n self.file_list = os.listdir(self.full_path)\n else:\n self.file_list = []", "def sync_dir(self):\n\n # mark the trajectories that we have seen\n trajectories = os.listdir(self.trajectory_dir)\n \n for trajectory_file in trajectories:\n\n if trajectory_file not in self.seen_trajectories:\n\n created = self.upload_trajectory(trajectory_file)\n self.seen_trajectories.add(trajectory_file)\n\n if created is True:\n print \"Total of %s solved trajectories\" % \\\n SolvedTrajectory.objects.count(), created", "def autodiscover(self):\n\n old_packages = self.list_task_packages()\n\n files = os.listdir(self.tasks_dir)\n for filename in files:\n pkg_dir = os.path.join(self.tasks_dir, filename)\n if os.path.isdir(pkg_dir):\n self.read_task_package(filename)\n old_packages.discard(filename)\n\n for pkg_name in old_packages:\n self.emit('TASK_REMOVED', pkg_name)", "async def start_polling_revisits(self):\n while True:\n now = time.time()\n next_revisit_time, revisit_paths = db.due_for_revisit(self.db_conn, now)\n self.log(\n \"Next revisit time: {} ({}s), due now: {}\".format(\n next_revisit_time,\n (next_revisit_time or now) - now,\n len(revisit_paths),\n )\n )\n\n for path in revisit_paths:\n try:\n stats = os.stat(path, follow_symlinks=False)\n except FileNotFoundError:\n stats = None\n await self.process_change(path, stats)\n else:\n if next_revisit_time is None:\n async with self.revisit_cond:\n await self.revisit_cond.wait()\n else:\n await asyncio.sleep(1)", "def test_watch_graph_caches(self):\n self.make_files(foo='foo', bar='bar')\n with pike.Graph('g') as graph:\n pike.glob('.', '*')\n watcher = pike.watch_graph(graph)\n ret = watcher.run()\n self.assertEqual(len(ret['default']), 2)\n with self.assertRaises(pike.StopProcessing):\n watcher.run()", "def reload_configurations(self) -> None:\n ...", "def test_watch_graph_changes(self):\n self.make_files(foo='foo', bar='bar')\n with pike.Graph('g') as graph:\n pike.glob('.', '*')\n watcher = pike.watch_graph(graph)\n ret = watcher.run()\n 
self.assertItemsEqual([f.data.read() for f in ret['default']],\n [b'foo', b'bar'])\n self.make_files(foo='foo', bar='foo')\n ret = watcher.run()\n self.assertItemsEqual([f.data.read() for f in ret['default']],\n [b'foo', b'foo'])", "def refreshSizeCrcDate(apRoot,old_sizeCrcDate,progress=None,removeEmpties=False,fullRefresh=False):\n rootIsMods = (apRoot == dirs['mods']) #--Filtered scanning for mods directory.\n norm_ghost = (rootIsMods and Installer.getGhosted()) or {}\n ghost_norm = dict((y,x) for x,y in norm_ghost.iteritems())\n rootName = apRoot.stail\n progress = progress or bolt.Progress()\n new_sizeCrcDate = {}\n bethFiles = bush.bethDataFiles\n skipExts = Installer.skipExts\n asRoot = apRoot.s\n relPos = len(apRoot.s)+1\n pending = set()\n #--Scan for changed files\n progress(0,_(\"%s: Pre-Scanning...\") % rootName)\n progress.setFull(1)\n dirDirsFiles = []\n emptyDirs = set()\n for asDir,sDirs,sFiles in os.walk(asRoot):\n progress(0.05,_(\"%s: Pre-Scanning...\\n%s\") % (rootName,asDir[relPos:]))\n if rootIsMods and asDir == asRoot:\n sDirs[:] = [x for x in sDirs if x.lower() not in Installer.dataDirsMinus]\n dirDirsFiles.append((asDir,sDirs,sFiles))\n if not (sDirs or sFiles): emptyDirs.add(GPath(asDir))\n progress(0,_(\"%s: Scanning...\") % rootName)\n progress.setFull(1+len(dirDirsFiles))\n for index,(asDir,sDirs,sFiles) in enumerate(dirDirsFiles):\n progress(index)\n rsDir = asDir[relPos:]\n inModsRoot = rootIsMods and not rsDir\n apDir = GPath(asDir)\n rpDir = GPath(rsDir)\n for sFile in sFiles:\n #print '...',sFile\n ext = sFile[sFile.rfind('.'):].lower()\n rpFile = rpDir.join(sFile)\n if inModsRoot:\n if ext in skipExts: continue\n if not rsDir and sFile.lower() in bethFiles: continue\n rpFile = ghost_norm.get(rpFile,rpFile)\n isEspm = not rsDir and (ext == '.esp' or ext == '.esm')\n apFile = apDir.join(sFile)\n size = apFile.size\n date = apFile.mtime\n oSize,oCrc,oDate = old_sizeCrcDate.get(rpFile,(0,0,0))\n if size == oSize and (date == oDate or isEspm):\n new_sizeCrcDate[rpFile] = (oSize,oCrc,oDate)\n else:\n pending.add(rpFile)\n #--Remove empty dirs?\n if settings['bash.installers.removeEmptyDirs']:\n for dir in emptyDirs: \n try: dir.removedirs()\n except OSError: pass\n #--Force update?\n if fullRefresh: pending |= set(new_sizeCrcDate)\n changed = bool(pending) or (len(new_sizeCrcDate) != len(old_sizeCrcDate))\n #--Update crcs?\n if pending:\n progress(0,_(\"%s: Calculating CRCs...\\n\") % rootName)\n progress.setFull(1+len(pending))\n try:\n us = unicode(rpFile.s, sys.getfilesystemencoding())\n except TypeError:\n us = rpFile.s\n for index,rpFile in enumerate(sorted(pending)):\n string = (_(\"%s: Calculating CRCs...\\n%s\") % \n (rootName, us)\n )\n progress(index,string)\n apFile = apRoot.join(norm_ghost.get(rpFile,rpFile))\n crc = apFile.crc\n size = apFile.size\n date = apFile.mtime\n new_sizeCrcDate[rpFile] = (size,crc,date)\n old_sizeCrcDate.clear()\n old_sizeCrcDate.update(new_sizeCrcDate)\n #--Done\n return changed", "def sync(self):\n if self.config['target_folder_id'] == '':\n self.get_list_all_folders()\n\n # Use queue to travers all folders in file tree\n queue = deque([(self.config['target_folder_id'], self.config['target_folder_name'],\n self.config['base_folder_dir'] + self.config['target_folder_name'] + '\\\\', 0)])\n\n self.BFS(queue)", "async def rebuild_virtualfs(cls):\n # todo: rebuild the image_virtual_paths table--using existing script\n # can be referenced from program config db_scripts_path\n logger = cls.get_logger()\n async with 
DbConnectionPool.get().acquire_dict_cursor(db=ProgramConfig.get().pwgo_db_name) as (cur,_):\n vfs_root = Path(AgentConfig.get().virtualfs_root)\n logger.debug(\"retrieving all image virtual paths from db\")\n await cur.execute(\"SELECT * FROM image_virtual_paths\")\n v_path_rows = await cur.fetchall()\n\n vfs_root_category_id = AgentConfig.get().virtualfs_category_id\n def is_in_vfs(uppercats_str):\n uppercats = [int(c.strip()) for c in uppercats_str.split(\",\")]\n return vfs_root_category_id in uppercats\n\n if vfs_root_category_id:\n # if there's a root category set then filter the returned rows\n v_path_rows = [p for p in v_path_rows if is_in_vfs(p[\"category_uppercats\"])]\n\n logger.debug(strings.LOG_VFS_REBUILD_REMOVE(vfs_root))\n if not ProgramConfig.get().dry_run:\n for file in vfs_root.files():\n file.remove()\n for directory in vfs_root.dirs():\n directory.rmtree()\n\n logger.debug(strings.LOG_VFS_REBUILD_CREATE(len(v_path_rows)))\n for row in v_path_rows:\n with Path(AgentConfig.get().piwigo_galleries_host_path):\n src_path = Path(row[\"physical_path\"]).abspath()\n if not src_path.exists():\n broken_msg = \"%s does not exist\"\n if AgentConfig.get().virtualfs_allow_broken_links:\n logger.warning(broken_msg, src_path)\n else:\n raise FileNotFoundError(broken_msg % src_path)\n with Path(AgentConfig.get().virtualfs_root):\n virt_path = Path(row[\"virtual_path\"]).abspath()\n\n if not ProgramConfig.get().dry_run and not virt_path.exists():\n virt_path.dirname().makedirs_p()\n src_path.symlink(virt_path)", "def deep_watch(self, d: Path) -> None:\n dir_links = [_ for _ in all_subdirs(d) if is_link_to_dir(_)]\n\n for watch_path in [d, *dir_links]:\n self.add_watch(\n str(watch_path),\n pyinotify.ALL_EVENTS,\n rec=True,\n )", "def set_file_paths(self, new_file_paths):\n self._file_paths = new_file_paths\n\n # clean up the queues; remove anything queued which no longer in the list, including callbacks\n self._file_path_queue = collections.deque(x for x in self._file_path_queue if x in new_file_paths)\n Stats.gauge(\"dag_processing.file_path_queue_size\", len(self._file_path_queue))\n\n callback_paths_to_del = [x for x in self._callback_to_execute if x not in new_file_paths]\n for path_to_del in callback_paths_to_del:\n del self._callback_to_execute[path_to_del]\n\n # Stop processors that are working on deleted files\n filtered_processors = {}\n for file_path, processor in self._processors.items():\n if file_path in new_file_paths:\n filtered_processors[file_path] = processor\n else:\n self.log.warning(\"Stopping processor for %s\", file_path)\n Stats.decr(\"dag_processing.processes\", tags={\"file_path\": file_path, \"action\": \"stop\"})\n processor.terminate()\n self._file_stats.pop(file_path)\n\n to_remove = set(self._file_stats).difference(self._file_paths)\n for key in to_remove:\n # Remove the stats for any dag files that don't exist anymore\n del self._file_stats[key]\n\n self._processors = filtered_processors", "def reload(self):\n if len(self.files) > 0:\n self.load(self.files, regfiles=self.regions)", "def syncfolder():", "def update_path(self):\r\n if len(self.queue) == 0:\r\n return\r\n self.path[:] = []\r\n current = self.peek_queue()[0]\r\n while current in self.previous:\r\n self.path.append(current)\r\n current = self.previous[current]", "def refresh_workdir(self) -> str:\n return self.workdir", "def update_path():\n #TODO update path information\n pass", "def refresh(path):\n if os.path.exists(path):\n os.remove(path)\n return path" ]
[ "0.61695445", "0.6074694", "0.599499", "0.5934256", "0.58923125", "0.5880447", "0.5830198", "0.5736178", "0.5667084", "0.56275195", "0.56197757", "0.5597608", "0.55753165", "0.5571855", "0.55354047", "0.5500853", "0.54532933", "0.5401339", "0.5387552", "0.53727967", "0.53642184", "0.53501177", "0.5315124", "0.53134316", "0.5310192", "0.52886313", "0.5284856", "0.5272666", "0.5266338", "0.5262172" ]
0.79090995
0
Occasionally print out stats about how fast the files are getting processed.
def _print_stat(self): if 0 < self.print_stats_interval < time.monotonic() - self.last_stat_print_time: if self._file_paths: self._log_file_processing_stats(self._file_paths) self.last_stat_print_time = time.monotonic()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_stats_thread(root):\n start = time.time()\n\n while True:\n time.sleep(3)\n timenow = time.time()\n elapsed = str(timedelta(seconds = timenow - start))\n inodesps = inodecount[root] / (timenow - start)\n logger.info('CRAWL STATS (path {0}, files {1}, dirs {2}, elapsed {3}, perf {4:.3f} inodes/s, {5} paths still scanning {6}, memory usage {7})'.format(\n root, filecount[root], dircount[root], elapsed, inodesps, len(scan_paths), scan_paths, get_mem_usage()))\n dps = total_doc_count[root] / (timenow - start)\n logger.info('ES UPLOAD STATS (path {0}, uploaded {1} docs, elapsed {2}, perf {3:.3f} docs/s)'.format(\n root, total_doc_count[root], elapsed, dps))", "def print_local_output_files_stats():\n print \"\\n\\nFILES CREATED:\"\n for filename in os.listdir('../output'):\n filesize = os.path.getsize('../output/' + filename)\n print str(filesize) + \"\\t\" + filename\n print \"\\n\"", "def print_file_stats(self):\n\n # current epoch time, file number, filename, filesize, trans secs, status\n print(f\"TRANS_STATS_FILE: {time.time()} {self.batchvals['numfiles']} {self.filevals['filename']} {self.filevals['numbytes']} {self.filevals['end_time'] - self.filevals['start_time']} {self.filevals['status']}\")", "def pass1(self, verbose):\n \n for root, dirs, files in os.walk(self.dir_to_check, topdown=False):\n t_size = 0\n for f in files:\n new_f = os.path.join(root,f) #complete path in case of homonyms\n size = os.path.getsize(new_f)\n t_size += size\n self.cache[new_f] = HumanReadableSize(size)\n t_size += sum ([self.cache[os.path.join(root,d)].val for d in dirs])\n self.cache[root] = HumanReadableSize(t_size)\n if verbose:\n print ('.................... Computing size of {}!'.format(root))\n \n #print (self.cache) #debugging", "def main():\n args = get_args()\n with profiling.profiled(enabled=args.profile_stats):\n process_files(args)", "def _iterate_over_files(self):\n stats = Statistics()\n\n args = arguments.Args()\n\n for file in args.files:\n\n if isimage(file):\n before_size = stats.calculate_before_optimization(file)\n\n puts(\"%s %s\" % (\n e(\"==>\"),\n os.path.basename(file))\n )\n\n if \"--lossy\" in args.flags:\n Optimize.lossy(file)\n if \"--lossless\" in args.flags:\n Optimize.lossless(file)\n after_size = stats.calculate_after_optimization(file)\n\n puts(\"%s %s (%s)\" % (\n p(\"<==\"),\n os.path.basename(file),\n s(after_size) if after_size < before_size else after_size\n ))\n\n stats.show_statistics()", "def stats(filename):\n from .utils import stats as print_stats\n click.echo('Starting to gather statistics on file {}'.format(filename))\n print_stats(filename)\n click.echo('Statistics printing finished')", "def _log_file_processing_stats(self, known_file_paths):\n # File Path: Path to the file containing the DAG definition\n # PID: PID associated with the process that's processing the file. 
May\n # be empty.\n # Runtime: If the process is currently running, how long it's been\n # running for in seconds.\n # Last Runtime: If the process ran before, how long did it take to\n # finish in seconds\n # Last Run: When the file finished processing in the previous run.\n headers = [\"File Path\", \"PID\", \"Runtime\", \"# DAGs\", \"# Errors\", \"Last Runtime\", \"Last Run\"]\n\n rows = []\n now = timezone.utcnow()\n for file_path in known_file_paths:\n last_runtime = self.get_last_runtime(file_path)\n num_dags = self.get_last_dag_count(file_path)\n num_errors = self.get_last_error_count(file_path)\n file_name = os.path.basename(file_path)\n file_name = os.path.splitext(file_name)[0].replace(os.sep, \".\")\n\n processor_pid = self.get_pid(file_path)\n processor_start_time = self.get_start_time(file_path)\n runtime = (now - processor_start_time) if processor_start_time else None\n last_run = self.get_last_finish_time(file_path)\n if last_run:\n seconds_ago = (now - last_run).total_seconds()\n Stats.gauge(f\"dag_processing.last_run.seconds_ago.{file_name}\", seconds_ago)\n\n rows.append((file_path, processor_pid, runtime, num_dags, num_errors, last_runtime, last_run))\n\n # Sort by longest last runtime. (Can't sort None values in python3)\n rows.sort(key=lambda x: x[3] or 0.0)\n\n formatted_rows = []\n for file_path, pid, runtime, num_dags, num_errors, last_runtime, last_run in rows:\n formatted_rows.append(\n (\n file_path,\n pid,\n f\"{runtime.total_seconds():.2f}s\" if runtime else None,\n num_dags,\n num_errors,\n f\"{last_runtime:.2f}s\" if last_runtime else None,\n last_run.strftime(\"%Y-%m-%dT%H:%M:%S\") if last_run else None,\n )\n )\n log_str = (\n \"\\n\"\n + \"=\" * 80\n + \"\\n\"\n + \"DAG File Processing Stats\\n\\n\"\n + tabulate(formatted_rows, headers=headers)\n + \"\\n\"\n + \"=\" * 80\n )\n\n self.log.info(log_str)", "def output_stats(self):\n elapsed = self.timer.elapsed.total_seconds()\n count = self.copied + self.errored\n total = self.total\n # Time per key in milliseconds\n avg = round(elapsed / count * 1000, 3)\n # Time remaining in seconds\n remaining = 1.0 * elapsed / count * (total - count)\n # Time remaining in minutes\n remaining = round(remaining / 60.0, 1)\n # Time taken in minutes\n elapsed = round(elapsed / 60.0, 1)\n\n self.log.info(f\"{self.prefix}: {avg}ms avg, {elapsed}min passed, \"\n f\"{remaining}min remaining. 
({count:,}/{total:,})\")", "def fileCounter(directory):", "def run(self):\r\n filesizes = {}\r\n # Build up dict with key as filesize and value is list of filenames.\r\n for path, dirs, files in walk( self._path ):\r\n for filename in files:\r\n filepath = joinpath( path, filename )\r\n filesize = stat( filepath ).st_size\r\n filesizes.setdefault( filesize, [] ).append( filepath )\r\n\r\n\r\n #Compare content hash of all files which have the same size\r\n #if two or more files have same hash and size they are added to the queue \r\n for files in [ flist for flist in filesizes.values() if len(flist)>1 ]:\r\n #run over all files in dir with the same size if there is more then one\r\n duplicates = {}\r\n for filepath in files:\r\n with open( filepath ) as openfile:\r\n filehash = md5( openfile.read() ).hexdigest()\r\n if filehash not in duplicates:\r\n duplicates.setdefault(filehash, []).append (filepath)\r\n else:\r\n duplicates[filehash].append(filepath)\r\n for duplicate in [ duplicate for duplicate in duplicates.values() if len(duplicate)>1 ]:\r\n self._queue.put(duplicate)\r\n self._finished_scan[0] = 1", "def emit_metrics(self):\n parse_time = time.perf_counter() - self._parsing_start_time\n Stats.gauge(\"dag_processing.total_parse_time\", parse_time)\n Stats.gauge(\"dagbag_size\", sum(stat.num_dags for stat in self._file_stats.values()))\n Stats.gauge(\n \"dag_processing.import_errors\", sum(stat.import_errors for stat in self._file_stats.values())\n )", "def _analyze(self):\n for _, self.subdirs, files in os.walk(self.path):\n if self.p.sort:\n self.subdirs.sort()\n files.sort()\n for f in files:\n self._analyze_file(fileextlow(f), f)\n break # stop walk() from entering subdirectories\n\n self.p.nr_dirs += 1\n if self.lossless or self.compressed or self.videos:\n if self.lossless or self.compressed:\n if not self.images:\n if self.p.warn_covers:\n print(f\"{W}{self.path}{R}: no cover file\")\n self.p.nr_no_cover += 1\n elif not have_valid_cover_name(self.images):\n if self.p.warn_covers:\n print(f\"{W}{self.path}{R}: wrong cover names\")\n self.p.nr_wrong_cover_name += 1\n if self.lossless:\n if self.compressed:\n self.p.nr_mixed_lossless_compressed += 1\n else:\n self.p.nr_lossless_dirs += 1\n\n if self.cue:\n if not self.lossless:\n if self.p.warn_cue:\n print(f\"{W}{self.path}{R}: cue but no lossless files\")\n self.p.nr_lossy_cue += 1\n elif not self.compressed:\n if len(self.cue) == 1:\n self.p.nr_cue += 1\n else:\n if self.p.warn_cue:\n print(f\"{W}{self.path}{R}: {len(self.cue)} cue files\")\n self.p.nr_multiple_cue += 1\n\n self.p.nr_media_dirs += 1\n self.p.nr_lossless += len(self.lossless)\n self.p.nr_compressed += len(self.compressed)\n self.p.nr_video_files += len(self.videos)\n self.p.nr_ignored += self.ignored\n self.p.unknown.update(self.unknown)\n else:\n if self.images and not self.subdirs:\n self.p.nr_only_images += 1\n else:\n self.p.nr_no_media_dirs += 1", "def performance_stats(self):\n current_status = psutil.STATUS_DEAD\n try:\n current_status = self.process.status()\n except psutil.NoSuchProcess:\n pass\n\n self.process_manager.handle_status_change(self.process_index, round(self.ioloop.time(), 2), current_status)\n\n if current_status != psutil.STATUS_DEAD:\n self.ioloop.call_later(0.5, self.performance_stats)", "def analyze_all(datadir, TPQI_starts, dataruns, save = 1, lower = 38.4):\n dirs = os.listdir(datadir)\n idx = 0\n right_dirs = list()\n\n\n for l in dataruns:\n for k in arange(len(dirs)):\n mark_right = '_interference_'+num2str(l,0) in dirs[k]\n \n if 
mark_right and (len(dirs[k]) > len('_interference_'+num2str(l,0))+6):\n mark_right = False\n\n if mark_right:\n right_dirs.append(dirs[k])\n idx += 1\n continue\n\n \n if len(right_dirs) == 0:\n print 'Did not find any files'\n\n if len(dataruns) == len(right_dirs):\n print 'Found all files...'\n else:\n print 'Beware, not all files are taken into account, file(s) missing.'\n \n tail_over_time = zeros(len(right_dirs))\n tpqi_starts = TPQI_starts[dataruns]\n statistics_info = zeros([len(right_dirs),4])\n \n for k in arange(len(right_dirs)):\n tail_over_time[k] = tail_cts_per_shot(datapath = datadir+'\\\\'+right_dirs[k], lower = lower, TPQI_starts = tpqi_starts[k], save = save)\n statistics_info[k,:] = analyze_thresholds(datapath = datadir+'\\\\'+right_dirs[k], threshold_lt1 = 0, threshold_lt2 = 9, normalize = True, save = save)\n\n\n os.chdir(datadir)\n percentage_finished = float(k+1)/len(right_dirs)*100\n print 'finished: '+num2str(percentage_finished,0)+'%'\n\n\n if save:\n times_passed_overall_lt1 = statistics_info[:,0]\n times_passed_after_seq_lt1 = statistics_info[:,1]\n times_passed_overall_lt2 = statistics_info[:,2]\n times_passed_after_seq_lt2 = statistics_info[:,3]\n filename = 'statistics_run_'+num2str(dataruns.min(),0)+'_to_'+num2str(dataruns.max(),0)+'.npz' \n savez(filename, tpqi_starts = tpqi_starts, tail_over_time = tail_over_time,\n times_passed_overall_lt1 = times_passed_overall_lt1, \n times_passed_after_seq_lt1 = times_passed_after_seq_lt1, \n times_passed_overall_lt2 = times_passed_overall_lt2,\n times_passed_after_seq_lt2 = times_passed_after_seq_lt2)\n\n \n\n figure3 = plt.figure(figsize=(12.0, 16.0))\n plt.subplot(211)\n plt.plot(dataruns,tail_over_time*1E4, '-k')\n plt.xlabel('TPQI run number')\n plt.ylabel('Tail counts per shot (x 1E-4)')\n plt.grid()\n plt.ylim([0,1.1*max(tail_over_time*1E4)])\n\n plt.subplot(212)\n plt.plot(dataruns,TPQI_starts[0:len(right_dirs)], '-k')\n plt.xlabel('TPQI run number')\n plt.ylabel('TPQI starts per run')\n plt.grid()\n plt.ylim([0, 1.1*TPQI_starts[0:len(right_dirs)].max()])\n if save:\n figure3.savefig('tpqi_starts_and_tail_over_time.png')", "def __collect_stats(self, encode, file_name):\n if encode not in self.__hash.keys():\n self.__hash[encode] = []\n self.__hash[encode].append(file_name)\n self.__files_count += 1\n with open(file_name, 'r', encoding=encode) as fr:\n for line in fr:\n self.__lines += 1\n self.__chars += len(line)", "def Results(self):\r\n try:\r\n numOfFiles = 0\r\n file = str(filenames).split(',')\r\n for file in filenames:\r\n if os.path.exists(file):\r\n numOfFiles += 1\r\n print('%d' % numOfFiles + ' videos resized!')\r\n info = 'totaltime: ' + str(datetime.timedelta(seconds=totaltime))\r\n print(info)\r\n except NameError:\r\n info = ''\r\n print('no totaltime passed')\r\n return info", "def stats_compute(self, *args, **kwargs):\n totalElements = 0\n totalKeys = 0\n totalSize = 0\n l_stats = []\n d_report = {}\n str_report = \"\"\n l_range = []\n\n if int(self.verbosityLevel) and self.toConsole():\n l_range = tqdm(sorted(self.d_inputTreeCallback.items(),\n key = lambda kv: (kv[1]['diskUsage_raw']),\n reverse = self.b_statsReverse),\n desc = ' Processing stats')\n else:\n l_range = sorted(self.d_inputTreeCallback.items(),\n key = lambda kv: (kv[1]['diskUsage_raw']),\n reverse = self.b_statsReverse)\n\n for k, v in l_range:\n try:\n if not self.args['du'] and not self.args['duf']:\n str_report += \"files: %5d│ raw_size: %12d│ human_size: %8s│ dir: %s\\n\" % (\\\n len(self.d_inputTree[k]),\n 
self.d_inputTreeCallback[k]['diskUsage_raw'],\n self.d_inputTreeCallback[k]['diskUsage_human'],\n k)\n else:\n str_report += '%-10s%s\\n' % (\n self.d_inputTreeCallback[k]['diskUsage_human'], k)\n except:\n pass\n d_report = {\n 'files': len(self.d_inputTree[k]),\n 'diskUsage_raw': self.d_inputTreeCallback[k]['diskUsage_raw'],\n 'diskUsage_human': self.d_inputTreeCallback[k]['diskUsage_human'],\n 'path': k\n }\n l_stats.append(d_report)\n totalElements += len(v)\n totalKeys += 1\n totalSize += self.d_inputTreeCallback[k]['diskUsage_raw']\n str_totalSize_human = self.sizeof_fmt(totalSize)\n return {\n 'status': True,\n 'report': str_report,\n 'dirs': totalKeys,\n 'files': totalElements,\n 'totalSize': totalSize,\n 'totalSize_human': str_totalSize_human,\n 'l_stats': l_stats,\n 'runTime': other.toc()\n }", "def process_stat_files(param):\n\n #get the files that are actually in the output directory\n call = ['cp', '-R']\n call.append(param['working_dir']+'results/featureCount/')\n call.append(param['working_dir']+'report/')\n _, _ = subprocess.Popen(call,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE).communicate()\n\n featurecount_file = (param['working_dir']+\n 'results/featureCount/featureCount_stats.txt')\n #extract table\n table = []\n filehandle = open(featurecount_file)\n #header\n table.append(filehandle.readlines()[0].rstrip().split('\\t'))\n table[0] = table[0][1:]\n filehandle.close()\n\n #total number of aligned reads\n tot_reads = param['bam_qc']['unique_aligned_reads']\n counter = [0] * len(param['bam_qc']['unique_aligned_reads'])\n \n filehandle = open(featurecount_file)\n for line in filehandle.readlines()[1:]:\n cur_line = line.rstrip().split('\\t')\n cur_line[0] = re.sub(r'_',' ',cur_line[0])\n if cur_line[0] not in ['Unassigned MultiMapping','Assigned']:\n counter = [ct + int(cr) for ct, cr in zip(counter, cur_line[1:])]\n perc = ([cur_line[0]]+\n MODULE_HELPER.get_percentage(cur_line[1:],\n tot_reads,\n len(cur_line)-1))\n table.append(perc)\n filehandle.close()\n assigned = [tot_reads[idx] - counter[idx] for idx in range(len(tot_reads))]\n perc = ['Assigned'] + MODULE_HELPER.get_percentage(assigned,\n tot_reads,\n len(counter))\n return table", "def process(self, args):\n for benchmark_file in args.benchmark_files:\n self.process_individual_file(benchmark_file)\n self.total_files += 1", "def process_file(self):\n self._processing_logger.log_info('Start processing')\n self.parsing_start_time = datetime.datetime.now()\n if os.path.exists(self.tmp_stat_file_path) \\\n and not HcsParsingUtils.active_processing_exceed_timeout(self.tmp_stat_file_path):\n self._processing_logger.log_info('This file is processed by another parser, skipping...')\n return 2\n self.create_tmp_stat_file()\n hcs_index_file_path = self.hcs_root_dir + MEASUREMENT_INDEX_FILE_PATH\n time_series_details = self._extract_time_series_details(hcs_index_file_path)\n self.generate_ome_xml_info_file()\n xml_info_tree = ET.parse(self.ome_xml_info_file_path).getroot()\n plate_width, plate_height = self._get_plate_configuration(xml_info_tree)\n wells_tags = self.read_wells_tags()\n if wells_tags:\n self._processing_logger.log_info(\"Tags \" + str(wells_tags))\n if not TAGS_PROCESSING_ONLY and not EVAL_PROCESSING_ONLY:\n if not self._localize_related_files():\n self._processing_logger.log_info('Some errors occurred during copying files from the bucket, exiting...')\n return 1\n else:\n self._processing_logger.log_info('Localization is finished.')\n local_preview_dir = os.path.join(self.tmp_local_dir, 
'preview')\n hcs_local_index_file_path = get_path_without_trailing_delimiter(self.tmp_local_dir) \\\n + MEASUREMENT_INDEX_FILE_PATH\n for sequence_id, timepoints in time_series_details.items():\n self._processing_logger.log_info('Processing sequence with id={}'.format(sequence_id))\n sequence_index_file_path = self.extract_sequence_data(sequence_id, hcs_local_index_file_path)\n conversion_result = os.system('bash \"{}\" \"{}\" \"{}\" {}'.format(\n OME_TIFF_SEQUENCE_CREATION_SCRIPT, sequence_index_file_path, local_preview_dir, sequence_id))\n if conversion_result != 0:\n self._processing_logger.log_info('File processing was not successful...')\n return 1\n sequence_overview_index_file_path, wells_grid_mapping = self.build_sequence_overview_index(sequence_index_file_path)\n conversion_result = os.system('bash \"{}\" \"{}\" \"{}\" {} \"{}\"'.format(\n OME_TIFF_SEQUENCE_CREATION_SCRIPT, sequence_overview_index_file_path, local_preview_dir,\n sequence_id, 'overview_data.ome.tiff'))\n if conversion_result != 0:\n self._processing_logger.log_info('File processing was not successful: well preview generation failure')\n return 1\n self.write_dict_to_file(os.path.join(local_preview_dir, sequence_id, 'wells_map.json'),\n self.build_wells_map(sequence_id, wells_grid_mapping, wells_tags))\n if LOCALIZE_USE_PIPE == \"true\":\n cloud_transfer_result = os.system('pipe storage cp -f -r \"{}\" \"{}\"'\n .format(local_preview_dir,\n HcsParsingUtils.extract_cloud_path(self.hcs_img_service_dir)))\n else:\n cloud_transfer_result = os.system('aws s3 sync \"{}\" \"{}\"'\n .format(local_preview_dir,\n HcsParsingUtils.extract_cloud_path(self.hcs_img_service_dir)))\n if cloud_transfer_result != 0:\n self._processing_logger.log_info('Results transfer was not successful...')\n return 1\n self._write_hcs_file(time_series_details, plate_width, plate_height)\n if not EVAL_PROCESSING_ONLY:\n tags_processing_result = self.try_process_tags(xml_info_tree, wells_tags)\n if TAGS_PROCESSING_ONLY:\n if wells_tags:\n for sequence_id, timepoints in time_series_details.items():\n path = os.path.join(self.hcs_img_service_dir, sequence_id, 'wells_map.json')\n self.write_dict_to_file(path, self.update_wells_json(path, wells_tags))\n return tags_processing_result\n if not TAGS_PROCESSING_ONLY:\n eval_processing_result = self.try_process_eval()\n if EVAL_PROCESSING_ONLY:\n return eval_processing_result\n self.create_stat_file()\n return 0", "def slow_update_duration(self):\n for i in range(len(self.data_file.sorted_data)):\n if self.data_file.sorted_data[i]['type'] == 'slow':\n slow_upd = self.data_file.sorted_data[i]['timestamp']\n Config.ANALYSIS.write(f\"slow at: {slow_upd}\\n\")\n if i == 0:\n after_slow = self.data_file.sorted_data[i + 1]['timestamp']\n Config.ANALYSIS.write(f\"after slow: ({slow_upd}, {after_slow}) \"\n f\"= {after_slow - slow_upd}\\n\\n\")\n elif i == len(self.data_file.sorted_data) - 1:\n before_slow = self.data_file.sorted_data[i - 1]['timestamp']\n Config.ANALYSIS.write(f\"before slow: ({before_slow}, {slow_upd}) \"\n f\"= {slow_upd - before_slow}\\n\\n\")\n else:\n before_slow = self.data_file.sorted_data[i - 1]['timestamp']\n after_slow = self.data_file.sorted_data[i + 1]['timestamp']\n Config.ANALYSIS.write(f\"before slow: ({before_slow}, {slow_upd}) \"\n f\"= {slow_upd - before_slow}\\n\")\n Config.ANALYSIS.write(f\"after slow: ({slow_upd}, {after_slow}) \"\n f\"= {after_slow - slow_upd}\\n\\n\")\n Config.ANALYSIS.write(\"\\n\\n\")", "def analyze_files(self):\n for file in os.listdir(self.directory):\n if 
file[-3:] == (\".py\"):\n fopen = open(os.path.join(self.directory, file), \"r\")\n try:\n if not (py_file := fopen):\n raise FileNotFoundError\n\n with py_file: # close file after opening\n class_count: int = 0\n fun_count: int = 0\n l_count: int = 0\n ch_count: int = 0\n for line in py_file: # calculate values for the file\n if line.strip().startswith(\"class \"):\n class_count = class_count+1\n elif line.strip().startswith(\"def \"):\n fun_count = fun_count+1\n\n l_count = l_count+1\n ch_count = ch_count+len(line)\n\n self.files_summary[str(os.path.join(self.directory, file))] = {\"class\": class_count, \"function\": fun_count, \"line\": l_count,\n \"char\": ch_count}\n except FileNotFoundError:\n print(f\"File {py_file} is not found or can not be opened\")\n fopen.close()", "def MainStats(path, filetype, NrExp, col, start, stop):\n# path= path.split('/') # here is better to google and see what is going on. Or experiment alone\n# path= \"/\".join(path[:-1]) \n dato=ExtractData_raw_files(path, filetype)\n dBase=dato.createDictBase()\n stats = Stats(dBase, NrExp, col, start, stop)\n means, stds=stats.Means_Stds()\n times = stats.time_return()\n return means , stds, times", "def stats_freq():\n\n # Get a worker number to position the progress bar\n global idxQueue\n thr_idx = idxQueue.get()\n\n setproctitle(f\"RNANet statistics.py Worker {thr_idx+1} stats_freq()\")\n\n # Initialize a Counter object for each family\n freqs = {}\n for f in famlist:\n freqs[f] = Counter()\n\n # List all nt_names happening within a RNA family and store the counts in the Counter\n for f in tqdm(famlist, position=thr_idx+1, desc=f\"Worker {thr_idx+1}: Base frequencies\", unit=\"family\", leave=False):\n with sqlite3.connect(runDir + \"/results/RNANet.db\") as conn:\n conn.execute('pragma journal_mode=wal')\n counts = dict(sql_ask_database(conn, f\"SELECT nt_name, COUNT(nt_name) FROM (SELECT chain_id from chain WHERE rfam_acc='{f}') NATURAL JOIN nucleotide GROUP BY nt_name;\", warn_every=0))\n freqs[f].update(counts)\n \n # Create a pandas DataFrame, and save it to CSV.\n df = pd.DataFrame()\n for f in tqdm(famlist, position=thr_idx+1, desc=f\"Worker {thr_idx+1}: Base frequencies\", unit=\"family\", leave=False):\n tot = sum(freqs[f].values())\n df = pd.concat([ df, pd.DataFrame([[ format_percentage(tot, x) for x in freqs[f].values() ]], columns=list(freqs[f]), index=[f]) ])\n df = df.fillna(0)\n df.to_csv(runDir + \"/results/frequencies.csv\") \n idxQueue.put(thr_idx) # replace the thread index in the queue\n setproctitle(f\"RNANet statistics.py Worker {thr_idx+1} finished\")\n # notify(\"Saved nucleotide frequencies to CSV file.\")", "def main(output_file):\n with open(output_file, 'w+') as fl:\n poor_perf_stats = pstats.Stats('poor_perf.log', stream=fl)\n good_perf_stats = pstats.Stats('good_perf.log', stream=fl)\n\n poor_perf_stats.sort_stats('cumtime')\n\n fl.write('--------------------------------------------\\n')\n fl.write('POOR PERFORMANCE STATS\\n')\n fl.write(f\"Time: {poor_perf_stats.total_tt}\\n\")\n fl.write(f\"Function Calls: {poor_perf_stats.total_calls}\\n\")\n fl.write(f\"Top cumulative times\\n\")\n poor_perf_stats.print_stats(20)\n\n fl.write('--------------------------------------------\\n')\n fl.write('GOOD PERFORMANCE STATS\\n')\n fl.write(f\"Time: {good_perf_stats.total_tt}\\n\")\n fl.write(f\"Function Calls: {good_perf_stats.total_calls}\\n\")\n fl.write(f\"Top 20 cumulative times\\n\")\n good_perf_stats.print_stats(20)", "def batch_analyze_wav(self, filePaths):\n\n toCSV = 
self.settings['output']['toCSV']\n toJSON = self.settings['output']['toJSON']\n\n start = time.time()\n\n fileTotal = 0\n for path in filePaths:\n if os.path.isdir(path):\n blockName = os.path.basename(path)\n print(f'Block: {blockName}')\n\n files = [os.path.join(path, file) for file in os.listdir(path) if '.wav' in file]\n fileTotal += len(files)\n\n if toCSV:\n if not os.path.exists(os.path.join(path, 'fft_results_csv')):\n os.makedirs(os.path.join(path, 'fft_results_csv'))\n resultFilePath = os.path.join(path, 'fft_results_csv')\n\n print('Processing FFTs...')\n with multiprocessing.Pool(processes=4) as pool:\n results = pool.starmap(Utils.AnalyzeFFT, zip(files, itertools.repeat(True),\n itertools.repeat(True)))\n results = [result for result in results if result is not None]\n\n peaks = [result[0] for result in results]\n ffts = [result[1] for result in results]\n\n print('Writing to .csv...')\n resultFileName = os.path.join(resultFilePath, f'{blockName}_Peaks.csv')\n peakFrames = pd.concat(peaks)\n peakFrames.to_csv(resultFileName, index=False, header=True)\n with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:\n executor.map(self.multi_csv_write, ffts)\n\n if toJSON:\n if not os.path.exists(os.path.join(path, 'fft_results_json')):\n os.makedirs(os.path.join(path, 'fft_results_json'))\n print(os.path.join(path, 'fft_results_json'))\n\n print('Processing FFTs...')\n with multiprocessing.Pool(processes=4) as pool:\n results = pool.starmap(Utils.AnalyzeFFT, zip(files, itertools.repeat(True),\n itertools.repeat(False),\n itertools.repeat(True)))\n results = [result for result in results if result is not None]\n\n print('Writing to .json...')\n with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:\n executor.map(self.multi_json_write, results)\n\n end = time.time()\n print(f'**Done!** {len(filePaths)} blocks with {fileTotal} files took {round(end-start, 1)}s')", "def statistics(self):\n \n u_self = resource.getrusage(resource.RUSAGE_SELF)\n\tu_children = resource.getrusage(resource.RUSAGE_CHILDREN)\n\t\n\tpath = os.getenv('TMPDIR')\n\tif not path:\n\t path = os.getcwd()\n\t \n\tdisk = 0 \n\tfor root, dirs, files in os.walk(path): \n\t for d in dirs+files:\n\t disk += os.stat(os.path.join(root, d)).st_size\n\n return dict(\n\t cpu = u_self[0]+u_self[1]+u_children[0]+u_children[1],\n\t memory = (u_self[2]+u_children[2])*resource.getpagesize(),\n\t disk = disk,\n\t time = self.elapsed_time(),\n\t signal = self.signal\n\t)", "def update_freq_dist(filename):\r\n pass", "def output_statistics(self, run_time):\n fps = self.received_frames / run_time\n MBps_per_frame = self.full_frame_length() / 1000.0 / 1000.0\n print '\\nRun time: %.2f seconds' % (run_time,)\n print 'Received frames: ', self.received_frames\n print 'Avg. frame rate: %s fps' % (fps,)\n print 'Avg. Bit rate: %.2f MB/s' % (MBps_per_frame * fps,)" ]
[ "0.6778385", "0.66478163", "0.6588825", "0.6544332", "0.649206", "0.6388902", "0.63528067", "0.62239826", "0.62017673", "0.6104883", "0.6089805", "0.6086532", "0.60859054", "0.606557", "0.6016941", "0.59960234", "0.5962753", "0.5934713", "0.59274864", "0.5912956", "0.589714", "0.5880039", "0.58712393", "0.586645", "0.5840986", "0.58176386", "0.5813088", "0.58052933", "0.58011645", "0.57792675" ]
0.6904008
0
Clear import errors for files that no longer exist.
def clear_nonexistent_import_errors(file_paths: list[str] | None, session=NEW_SESSION): query = session.query(errors.ImportError) if file_paths: query = query.filter(~errors.ImportError.filename.in_(file_paths)) query.delete(synchronize_session="fetch") session.commit()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear_errors(self) -> None:", "def clear_errors(self) -> None:", "def test_dupe_imports(self):\r\n good_file = self._get_del_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n good_file = self._get_del_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n # now let's do some db sanity checks\r\n self._delicious_data_test()", "def clean(self) -> None:\n # remove all *.py and *.pyi files in the folder\n for wc in [\"*.py\", \"*.pyi\", \"modules.json\"]:\n for f in (self.package_path).rglob(wc):\n f.unlink()", "def reset(self):\n def remove_auxiliary_dir():\n egg_info_dir = self.project_name_sc + \".egg-info\"\n remove_directories([\n egg_info_dir,\n \".env\",\n \".eggs\",\n \".pytest_cache\",\n \"build\",\n \"dist\",\n \".cache\",\n \".benchmark\",\n \".tox\",\n \".vagrant\",\n \".tox\"])\n remove_files([\n \".coverage\",\n \".doit.db\",\n \".doit.bak\",\n \".doit.dat\",\n \".doit.dir\",\n ])\n\n # TODO(lschneider): Remove unnecessary files without command lines.\n # This code could be run directly from this function. However\n # the pathlib library is not part of the standard python 2.\n prefix = \"python -c \\\"import pathlib; \"\n delete_pyfiles = prefix + \"import pathlib; [p.unlink() for p in pathlib.Path('.').rglob('*.py[co]')]\\\"\"\n delete_dirs = prefix + \"import pathlib; [p.rmdir() for p in pathlib.Path('.').rglob('__pycache__')]\\\"\"\n\n return {\n \"actions\": [\n delete_pyfiles,\n delete_dirs,\n remove_auxiliary_dir,\n ],\n \"verbosity\": 2\n }", "def clean_files(self):\n self.filenames.clear()", "def test_dupe_imports(self):\r\n good_file = self._get_del_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n good_file = self._get_del_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n # Now let's do some db sanity checks.\r\n self._delicious_xml_data_test()", "def cleanUp(self):\n import evoware.fileutil as F\n F.tryRemove(self.f_project, verbose=(self.VERBOSITY>1), tree=1)", "def tearDown(self):\n builtins.__import__ = self.original_imports", "def clean(ctx):\n ctx.run(\n \"find . -type f -name '*.pyc' -delete && \"\n \"find . -type f -name '*.pyo' -delete && \"\n \"rm -rf .pytest_cache && \"\n \"rm -rf .mypy_cache\"\n )", "def clean_up(self):\n self.fname = None\n self.failed_files = []\n self.custom_failed = []\n self.results = None", "def clean_python(context):\n context.run(\"find . -name '*.pyc' -exec rm -f {} +\")\n context.run(\"find . -name '*.pyo' -exec rm -f {} +\")\n context.run(\"find . -name '*~' -exec rm -f {} +\")\n context.run(\"find . 
-name '__pycache__' -exec rm -fr {} +\")", "def cleanup_import(package_name):\n\n for k in list(sys.modules):\n if not isinstance(k, str):\n # Some things will actually do this =_=\n continue\n elif k.startswith('astropy_helpers.tests'):\n # Don't delete imported test modules or else the tests will break,\n # badly\n continue\n if k == package_name or k.startswith(package_name + '.'):\n del sys.modules[k]", "def dev_clean():\n clean_files(\"csv\", True)\n clean_files(\"jsontxt\", True)", "def __clearAllSyntaxErrors(self):\n for editor in self.editors:\n editor.clearSyntaxError()", "def fix_nonerrors(self):\n if not self.only_error:\n return\n self.line = None\n self.filename = None", "def cleanup() -> None:\n\n for fname in glob(os.path.join(tdir, 'alexandria.*')):\n if os.path.splitext(fname)[1] not in {'.c', '.h'}:\n os.unlink(fname)", "def clean():\n clean_files()", "def clear_files_paths(self):\n del self.__files_paths[:]", "def remove_collector_imports(self):\n with open(self.filename, \"r+\") as code_file:\n content = code_file.read()\n # Delete file content so the file won't be a mess\n code_file.seek(0)\n code_file.truncate()\n # clean_content will store the content without the import lines.\n clean_content = content\n collector_import_lines = f\"{self.IMPORT_COLLECTOR_LINE}\\n{self.EXPLICIT_DECLARATION_IMPORTS_LINE}\\n\\n\"\n if content.startswith(collector_import_lines):\n logger.debug(\"Removing added import lines.\")\n # Split the content to the parts before and after the collector_import_lines\n content_parts = content.split(collector_import_lines)\n # Restore content to previous form and ignore the first found import lines.\n clean_content = f\"{collector_import_lines}\".join(content_parts[1:])\n\n code_file.write(clean_content)", "def cleanup(self):\n files = self.nlst()\n latest = self.latest_filename\n for filename in files:\n if filename != latest:\n result = self.delete(filename)\n logger.info(f\"Deleted old export from FTP: {result}\")", "def clear_error(self):\n self.got_error = False", "def clearWarnings():\n for name, mod in list(sys.modules.items()):\n try:\n reg = getattr(mod, \"__warningregistry__\", None)\n except ImportError:\n continue\n if reg:\n reg.clear()", "def cleanup_files(self):\n\n self.backup_files()\n self.delete_files()", "def clean():\n for dirpath, dirnames, filenames in os.walk('.'):\n for filename in filenames:\n if filename.endswith('.pyc') or filename.endswith('.pyo'):\n full_pathname = os.path.join(dirpath, filename)\n click.echo('Removing {}'.format(full_pathname))\n os.remove(full_pathname)", "def clean():\n C.libs.clear()\n shutil.rmtree(C.cache_dir, ignore_errors=True)", "def module_cleanup():\n from bokeh.core.has_props import _default_resolver\n to_reset = list(panel_extension._imports.values())\n\n _default_resolver._known_models = {\n name: model for name, model in _default_resolver._known_models.items()\n if not any(model.__module__.startswith(tr) for tr in to_reset)\n }", "def clean_errors(self):\n self._vim.eval('clearmatches()')\n self._errors = []\n self._matches = []\n # Reset Syntastic notes - TODO: bufdo?\n self._vim.current.buffer.vars['ensime_notes'] = []", "def clean(self):\n os.remove(\"temp.py\") # Delete the file \"temp.py\", to free up disk space", "def clean_python(c):\n c.run(\"find . -name '*.pyc' -exec rm -f {} +\")\n c.run(\"find . -name '*.pyo' -exec rm -f {} +\")\n c.run(\"find . -name '*~' -exec rm -f {} +\")\n c.run(\"find . -name '__pycache__' -exec rm -fr {} +\")" ]
[ "0.6780713", "0.6780713", "0.6319068", "0.625522", "0.6212573", "0.6209276", "0.61864525", "0.613826", "0.6076129", "0.6056241", "0.6041389", "0.59878427", "0.59337693", "0.5921071", "0.5900005", "0.5886828", "0.5866447", "0.5844186", "0.58224547", "0.5803731", "0.58004135", "0.57782495", "0.5773853", "0.5769499", "0.57691014", "0.5766727", "0.5763704", "0.5762973", "0.5760281", "0.5757338" ]
0.7748205
0
Retrieve the PID of the process processing the given file or None if the file is not being processed.
def get_pid(self, file_path) -> int | None: if file_path in self._processors: return self._processors[file_path].pid return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getPid(self):\n try:\n fh = open(self.filename)\n except OSError:\n return None\n line = fh.readline()\n try:\n return string.atoi(line) # trailing newline doesn't matter\n except ValueError:\n return None", "def get_pid(self):\n try:\n pf = open(self.pidfile,'r')\n pid = int(pf.read().strip())\n pf.close()\n except (IOError, TypeError):\n pid = None\n return pid", "def pid(self):\n # type: () -> Optional[int]\n try:\n return self._process.pid # type: ignore # pylint: disable=no-member\n except:\n return None", "def get_pid(self):\n\t\ttry:\n\t\t\tpid_file = open(self.pid_file_path, 'r');\n\t\t\tpid = int(pid_file.read().strip())\n\t\t\tpid_file.close()\n\t\texcept:\n\t\t\tpid = -1;\n\t\treturn pid", "def get_pid(self):\n if self.status():\n file = open(os.path.join(self.data_dir, 'postmaster.pid'))\n pid = int(file.readline())\n return pid\n else:\n return None", "def get_pidfile(self):\n pid = None\n \n # checking if the file exists on system\n if not os.path.exists(self._pidfile):\n return pid\n \n # read the pid\n with open(self._pidfile, 'r') as f:\n pid = int(f.read().strip())\n\n return pid", "def status(pid_file):\n if not os.path.exists(pid_file):\n return None\n\n pid = None\n with open(pid_file, \"r\") as pf:\n pid = pf.read().strip()\n\n if not pid:\n logger.error(\"Unable to retrieve pid from %s\" % pid_file)\n return None\n\n if not pid.isdigit():\n logger.error(\"Invalid pid %s read from %s\" % (pid, pid_file))\n return None\n\n pid = int(pid)\n\n try:\n # Send 0 signal to check if the process is alive.\n os.kill(pid, 0)\n except OSError as e:\n logger.debug(\"%s\" % e, exc_info=True)\n return None\n return pid", "def get_ts_pid(pidfile):\n try:\n with open(pidfile) as f:\n pid = f.readline()\n if pid.strip().isdigit():\n pid = int(pid.strip())\n else:\n LOG.warning(\"Unable to read pidfile %s file contains %r; process metrics will fail!\", pidfile, pid)\n pid = None\n except EnvironmentError:\n LOG.warning(\"Unable to read pidfile %s; process metrics will fail!\", pidfile)\n pid = None\n return pid", "def pid(self):\n if self.proc is None:\n return 0\n return self._pid()", "def pid(self):\n\n return getpid() if self.__process is None else self.__process.pid", "def process(self) -> Union['psutil.Process', None]:\n psutil = attempt_import('psutil')\n pid = self.pid\n if pid is None:\n return None\n if not '_process' in self.__dict__ or self._process.pid != int(pid):\n try:\n self._process = psutil.Process(int(pid))\n except Exception as e:\n if self.pid_path.exists():\n self.pid_path.unlink()\n return None\n return self._process", "def __read_pidfile(self):\n try:\n pf = file(self.pidfile, 'r')\n contents = pf.read().strip().split()\n pf.close()\n except IOError:\n return None\n\n pid = int(contents[0])\n try:\n os.kill(pid, 0)\n except OSError, e:\n # ESRCH indicates the process is not running, in which case we ignore the pidfile.\n if e.errno == errno.ESRCH:\n return None\n # EPERM indicates the current user does not have permission to signal the process.. so it exists\n # but may not be the agent process. We will just try our /proc/pid/commandline trick below if we can.\n elif e.errno != errno.EPERM:\n raise e\n\n # If we got here, the process is running, and we have to see if we can determine if it is really the\n # original agent process. 
For Linux systems with /proc, we see if the commandlines match up.\n # For all other Posix systems, (Mac OS X, etc) we bail for now.\n if not self.__can_read_command_line(pid):\n return pid\n\n # Handle the case that we have an old pid file that didn't have the commandline right into it.\n if len(contents) == 1:\n return pid\n\n command_line = self.__read_command_line(pid)\n if contents[1] == command_line:\n return pid\n else:\n return None", "def get_pid(pidfile):\n pid = None\n if os.path.exists(pidfile):\n with open(pidfile, 'r') as f:\n pid = f.read()\n return pid", "def pid(self):\n return self._process.pid", "def pidof(processname = None):\n processname = os.path.basename(processname)\n pidpath = os.path.join(pid_path,processname + \".pid\")\n if processname is not None and os.path.exists(pidpath):\n f = open (pidpath)\n pids = f.readlines()\n f.close()\n return pids\n else:\n return False", "def pid(self) -> str:\n if not self.pid_path.exists():\n return None\n try:\n with open(self.pid_path, 'r') as f:\n text = f.read()\n except Exception as e:\n warn(e)\n text = None\n return text.rstrip('\\n') if text is not None else text", "def getStepPID(stepSpace, stepName):\n currDir = stepSpace.location\n pidFile = os.path.join(currDir, 'process_id')\n if not os.path.isfile(pidFile):\n msg = \"Could not find process ID file for step %s\" % stepName\n logging.error(msg)\n return\n\n with open(pidFile, 'r') as filehandle:\n output = filehandle.read()\n\n try:\n stepPID = int(output)\n except ValueError:\n msg = \"Couldn't find a number\"\n logging.error(msg)\n return None\n\n return stepPID", "def pid(self):\n return self._get_process_id()", "def get_pid_processor(pid):\n if pid != 0:\n pid_str = pid\n else:\n pid_str = 'self'\n\n # read procfs /proc/PID/stat file to get info about processor\n # that PID was scheduled on last time\n try:\n with open(\"/proc/{}/stat\".format(pid_str)) as stat_file:\n proc_stat = stat_file.readline().strip().split(' ')\n return int(proc_stat[39])\n except EnvironmentError:\n return -1", "def read_pid(self):\n result = read_pid_from_pidfile(self.path)\n return result", "def _get_pid(self):\n ps_txt = six.ensure_str(self.controller.run(\n args=[\"ps\", \"ww\", \"-u\"+str(os.getuid())]\n ).stdout.getvalue()).strip()\n lines = ps_txt.split(\"\\n\")[1:]\n\n for line in lines:\n if line.find(\"ceph-{0} -i {1}\".format(self.daemon_type, self.daemon_id)) != -1:\n log.info(\"Found ps line for daemon: {0}\".format(line))\n return int(line.split()[0])\n log.info(\"No match for {0} {1}: {2}\".format(\n self.daemon_type, self.daemon_id, ps_txt\n ))\n return None", "def getDisplay(self):\n m = _Regexps.pidFile.match(os.path.basename(self.filename))\n if m:\n return m.group(1)\n else:\n return None", "def get_pid(name):\n try: \n for process in psutil.process_iter():\n try:\n proc = process.as_dict(attrs=['pid', 'name'])\n if name in proc['name']:\n pid = proc['pid']\n logging.info(f\"Found PID {pid} for {name}\")\n return int(pid) \n except (psutil.NoSuchProcess, psutil.AccessDenied , psutil.ZombieProcess) :\n pass \n except Exception as e:\n logging.exception(f\"EXCEPTION: {e} \\n Full stack trace: \\n\", exc_info=1)", "def process_id(self):\n return self._process_id", "def process_id(self):\n return self._process_id", "def read_pid_from_pidfile(pidfile_path):\n pid = None\n try:\n pidfile = open(pidfile_path, 'r')\n except IOError:\n pass\n else:\n line = pidfile.read().strip()\n try:\n pid = int(line)\n except ValueError:\n pass\n pidfile.close()\n\n return pid", "def 
get_daemon_pid():\n try:\n return _get_pid_from_pidfile()\n except (FileNotFoundError, ValueError):\n return None", "def pid(self):\n return self._query_status()['pid']", "def get_pid_filename(process_name):\n return os.path.join(settings.settings['pid_directory'], context.process_context[process_name].pid_filename)", "def get_process_pid(robot_name):\n\n try:\n result = check_output(['pgrep', 'x{0}'.format(robot_name)])\n return int(result.strip())\n except:\n return None" ]
[ "0.8067979", "0.74126506", "0.7294264", "0.72064096", "0.7060661", "0.70579207", "0.70573556", "0.6864191", "0.68129", "0.6786206", "0.6730113", "0.6702546", "0.66658545", "0.6574954", "0.64804435", "0.64639413", "0.6415075", "0.6396912", "0.6365991", "0.62697995", "0.62601995", "0.62597376", "0.62183475", "0.6166127", "0.6166127", "0.61231196", "0.60107094", "0.6000392", "0.5948681", "0.59246373" ]
0.8661299
0
Retrieve the last processing time of a specific path.
def get_last_runtime(self, file_path) -> float | None:
    stat = self._file_stats.get(file_path)
    return stat.last_duration.total_seconds() if stat and stat.last_duration else None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_last_finish_time(self, file_path) -> datetime | None:\n stat = self._file_stats.get(file_path)\n return stat.last_finish_time if stat else None", "def last_access_time(path):\r\n return os.stat(path)[stat.ST_ATIME]", "def get_mtime(path):\n try:\n return path.lstat().mtime\n except error.Error:\n pass", "def getmtime(path):\n return get_instance(path).getmtime(path)", "def getmtime(self, path):\n return os.path.getmtime(path)", "def get_last_fetch_time():\n if os.path.exists(LAST_FETCH_TIME_FILE):\n with open(LAST_FETCH_TIME_FILE, 'r') as f:\n last_fetch_time = f.read()\n\n return last_fetch_time\n return ''", "def load_last_run_time():\n # path = \"/Users/szou/Downloads/bu/happydogs/analytics_happydogs/last_time_run\"\n if os.path.isfile(\"last_time_run\"): #\n # If the file exists\n f = open(\"last_time_run\", \"r\")\n last_run_time = datetime.datetime.strptime(f.read(), \"%Y-%m-%d %H:%M:%S\")\n f.close()\n return last_run_time\n save_current_run_time()\n # If file doesn't exist (possible if it's the first run), return current time\n return datetime.datetime.now()", "def time_to_process_last_submission(self) -> int:\n return self.snapshot['time_to_process_last_submission']", "def __last_time(self):\n if self.__stopped is not None:\n return self.__stopped\n return self.__time()", "def get_time(self):\n return self.run_command('get_time')[0]", "def last_new_file_time(self) -> datetime.datetime:\n with self.lock:\n return self._last_new_file_time", "def get_last_update(self):\n last_update = os.path.getmtime(self.parent_filepath)\n return last_update", "def get_last_flight(self):\n return self.path[len(self.path) - 1]", "def lasttime(self):\n if hasattr(self, \"_lasttime\"):\n return self._lasttime\n else:\n return None", "def get_mtime(self):\n return os.path.getmtime(self.get_path())", "def getDate(path):\n utime = ftp.stat(path=path).st_mtime\n last_modified = datetime.fromtimestamp(utime)\n return last_modified", "def _GetUpdateTime(filename):\n stat_info = os.stat(filename)\n return (stat_info.st_atime, stat_info.st_mtime)", "def getlmtime(self):\n if self.islink() and self.lexists():\n st = os.lstat(self.path)\n return st.st_mtime\n return Stat.getmtime(self)", "def getctime(path):\n return get_instance(path).getctime(path)", "def getLastestTime(self):\n if not self.cache_times:\n return None\n return self.cache_times[-1]", "def get_last_time(time_file):\n \n try :\n last_time = int(open(time_file).read().rstrip('\\n'))\n except IOError :\n last_time = int(time.time())\n \n return last_time", "def get_last_time(self):\n \n return self._last", "def getmtime(self):\n if self.exists():\n return os.path.getmtime(self.path)\n return 0", "def get_time(self):\n # if the job is being processed or the CC had a crash return None\n if self.status <= 0:\n return None\n\n if self.status in (STATUS_FINISHED, 21):\n return self.resultTime\n\n return None", "def _get_last_meas_time(self):\n\n #if flag for whole data regeneration is set\n if self._process_type == 'full_gen':\n return datetime.datetime(1900, 1, 1, 0, 0, 0)\n \n \n res = self._db.Query(\"\"\"SELECT last_measurement_time\n FROM last_dashboard_element_segment_value\n WHERE\n element_id = %s\n AND segment_value_id = %s\n \"\"\",(self._id, self._segment_value_id))\n if not res:\n return datetime.datetime(1900, 1, 1, 0, 0, 0)\n item = self._db.record[0]\n if item['last_measurement_time']:\n return item['last_measurement_time']\n return datetime.datetime(1900, 1, 1, 0, 0, 0)", "def last_modified_time(self) -> pulumi.Output[str]:\n 
return pulumi.get(self, \"last_modified_time\")", "def process_time():\n\n if Python2:\n return time.time()\n else:\n return time.process_time()", "def last_processed(self):\n return self._last_processed", "def last_updated_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"last_updated_time\")", "def get_cache_time(self, URL):\n\n cache_file = self.get_cache_file_path(URL)\n\n if cache_file.exists():\n cache_time = cache_file.stat().st_mtime\n else:\n cache_time = 0\n\n return cache_time" ]
[ "0.7153084", "0.7028886", "0.6791224", "0.66785485", "0.6607391", "0.65836203", "0.6493155", "0.6441159", "0.6370403", "0.63642323", "0.6360754", "0.63283616", "0.6318243", "0.6314464", "0.627511", "0.62685287", "0.6256992", "0.62244946", "0.62165654", "0.61994123", "0.61731297", "0.6169469", "0.6135725", "0.61332864", "0.60662353", "0.6052898", "0.603934", "0.6039192", "0.603133", "0.6011681" ]
0.72417444
0
Retrieve the total number of errors from processing a specific path.
def get_last_error_count(self, file_path) -> int | None:
    stat = self._file_stats.get(file_path)
    return stat.import_errors if stat else None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error_count(self):\n return len(self.errors)", "def total_errors(self):\r\n\r\n child_errors = sum(len(tree) for _, tree in iteritems(self._contents))\r\n return len(self.errors) + child_errors", "def get_error_count(self):\n return sum(1 for outcome in (r.outcome for r in self.values()) if outcome == Result.ERROR)", "def count(path):\n\ttry:\n\t\treturn len(os.listdir(path))\n\texcept Exception as e:\n\t\t# We most probably hit a permission denied here\n\t\treturn -1", "def _get_run_info(self, path, creation_date):\n total = 0\n try:\n for entry in os.scandir(path):\n # Only evaluates size of files and not folders inside raw/proc\n if entry.is_file():\n # if it's a file, use stat() function\n total += entry.stat().st_size\n\n except NotADirectoryError:\n # if `path` isn't a directory, get the file size then\n total = os.path.getsize(path)\n except PermissionError:\n # if for whatever reason we can't open the folder, return 0\n return 0\n\n if os.path.isdir(path):\n validator = RunValidator(path)\n elif path.endswith(\".h5\"):\n validator = FileValidator(H5File(path).files[0])\n else:\n return 0\n\n try:\n validator.run_checks()\n except Exception:\n pass\n return total, str(ValidationError(validator.problems))", "def total_errors(error_report):\n return(sum(error_report.values()))", "def getNumErrors(self):\n return _libsbml.XMLErrorLog_getNumErrors(self)", "def filesize(self, path):\n try:\n return len(self.extract(path, None))\n except Exception as e:\n return 0", "def size(path):", "def evaluatePath(self):\n pathLength = 0\n if len(self.path) > 0:\n previousCity = self.path[0]\n for ind in range(1, len(self.path)):\n pathLength += previousCity.distanceWith(self.path[ind].name)\n previousCity = self.path[ind]\n return pathLength", "def getsize(path):\n return stat(path).st_size", "def getFileSize( self, path ):\n res = self.__checkArgumentFormat( path )\n if not res['OK']:\n return res\n urls = res['Value']\n successful = {}\n failed = {}\n gLogger.debug( \"DIPStorage.getFileSize: Attempting to obtain size for %s files.\" % len( urls ) )\n res = self.getFileMetadata( urls )\n if not res['OK']:\n return res\n for url, urlDict in res['Value']['Successful'].items():\n if urlDict['Exists']:\n successful[url] = urlDict['Size']\n else:\n failed[url] = 'File does not exist'\n for url, error in res['Value']['Failed'].items():\n failed[url] = error\n resDict = {'Failed':failed, 'Successful':successful}\n return S_OK( resDict )", "def error_count():\n return cpp_style.error_count()", "def get_run_count(self, file_path) -> int:\n stat = self._file_stats.get(file_path)\n return stat.run_count if stat else 0", "def num_failed(self):\n return sum(cmd.failed for id, cmd in self.commands)", "def error_device_count(self):\n if \"errorDeviceCount\" in self._prop_dict:\n return self._prop_dict[\"errorDeviceCount\"]\n else:\n return None", "def getsize(path):\n return get_instance(path).getsize(path)", "def countTotalDistance(path):\n current = path[0]\n totalDistance = 0\n\n for node in path[1:]:\n totalDistance += distance_func(current, node)\n current = node\n\n return totalDistance", "def getPathLength(self, path):\r\n path_nodes = path\r\n # print(path_nodes)\r\n total_length = 0 # km\r\n for i in range(len(path_nodes)-1):\r\n next_edge = None\r\n for edge in self.graph.getAdj(path_nodes[i]):\r\n if edge.getExtremityNode() == path_nodes[i+1]:\r\n next_edge = edge\r\n if next_edge is None: # it means the path is invalid\r\n return None\r\n total_length += next_edge.getWeight()\r\n # 
print(next_edge.getTravelType(), end=\" \")\r\n return total_length", "def getNumErrors(self, *args):\n return _libsbml.SBMLDocument_getNumErrors(self, *args)", "def _disk_usage(path: pathlib.Path):\n if path.is_file():\n return path.stat().st_size\n elif path.is_dir():\n size_bytes = 0\n for file in path.iterdir():\n size_bytes += _disk_usage(file)\n return size_bytes\n else:\n raise NotImplementedError(\"What filetype is {file}?\")", "def failed_location_count(self) -> float:\n return pulumi.get(self, \"failed_location_count\")", "def get_errors(self, path: str,\n is_ancillary: bool = False,\n is_system: bool = False,\n is_removed: bool = False) -> List[str]:", "def count_files_dir(self,full_path):\n try:\n num_files = len([name for name in os.listdir(full_path) if os.path.isfile(self.FILENAME)])\n print(f\"Number of files in {full_path} is {num_files}\")\n return num_files\n except Exception as e:\n raise SystemExit(f\"Could not complete operation: {e}\")", "def getFileCount(self, startingWithPath=\"\"):\n return self.__controller._getRecordsCount(startingWithPath)", "def aggregate_errors(assignment) -> int:\n return sum(len(problem.errors.all()) for problem in assignment.problems.all())", "def get_total_line_counts(self):\n return get_total_line_counts(self.files.all())", "def filesize(self, path):\n return self._handle.getinfo(path).file_size", "def filesize(self, path):\n return self._handle.getinfo(path).file_size", "def numPaths(self):\n if self.numpaths > -1:\n return self.numpaths\n\n if self.jolt == 0:\n return 1\n\n paths = 0\n for parent in self.parents:\n paths += parent.numPaths()\n \n return paths" ]
[ "0.68291694", "0.66321224", "0.6539631", "0.65059227", "0.6414099", "0.62368226", "0.62356204", "0.62267065", "0.6063823", "0.6063233", "0.6059517", "0.6041053", "0.60339576", "0.6027389", "0.6023086", "0.6006854", "0.59940124", "0.59850127", "0.5966939", "0.5951586", "0.59206086", "0.5892856", "0.58874905", "0.58848536", "0.5873631", "0.5859142", "0.5838555", "0.5809492", "0.5809492", "0.5805324" ]
0.6987642
0
Retrieve the last completion time for processing a specific path.
def get_last_finish_time(self, file_path) -> datetime | None:
    stat = self._file_stats.get(file_path)
    return stat.last_finish_time if stat else None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def last_completion_time(self) -> float:\n return self._last_completion_time - self._start_time", "def completion_time(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"completion_time\")", "def completion_time(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"completion_time\")", "def completion_time(self) -> datetime:\n return self._completion_time", "def last_access_time(path):\r\n return os.stat(path)[stat.ST_ATIME]", "def get_last_runtime(self, file_path) -> float | None:\n stat = self._file_stats.get(file_path)\n return stat.last_duration.total_seconds() if stat and stat.last_duration else None", "def get_mtime(path):\n try:\n return path.lstat().mtime\n except error.Error:\n pass", "def last_new_file_time(self) -> datetime.datetime:\n with self.lock:\n return self._last_new_file_time", "def round_trip_time(self):\r\n return self.completion_time - self.launch_time", "def getmtime(self, path):\n return os.path.getmtime(path)", "def time_to_process_last_submission(self) -> int:\n return self.snapshot['time_to_process_last_submission']", "def last_optime(self):\n return self._last_optime", "def getmtime(path):\n return get_instance(path).getmtime(path)", "def getEndTime(self):\n assert self.isFinished(), \"Too early to tell: %s\" % self\n return \"%s\" % self.__rawInfo.endTime", "def get_time(self):\n # if the job is being processed or the CC had a crash return None\n if self.status <= 0:\n return None\n\n if self.status in (STATUS_FINISHED, 21):\n return self.resultTime\n\n return None", "def __last_time(self):\n if self.__stopped is not None:\n return self.__stopped\n return self.__time()", "def completion_time_utc(self) -> Optional[str]:\n return pulumi.get(self, \"completion_time_utc\")", "def get_last_fetch_time():\n if os.path.exists(LAST_FETCH_TIME_FILE):\n with open(LAST_FETCH_TIME_FILE, 'r') as f:\n last_fetch_time = f.read()\n\n return last_fetch_time\n return ''", "def getEndTime(self):\n assert self.isFinished(), \"Too early to tell: %s\" % self\n return \"%s\" % self.__jobInfo.endTime", "def lasttime(self):\n if hasattr(self, \"_lasttime\"):\n return self._lasttime\n else:\n return None", "def response_time(self):\r\n if self.__arrival_time == INVALID_TIME:\r\n self.__logger.debug(\"Request %s missing arrival time\" % self.__id)\r\n return INVALID_TIME_DELTA\r\n completion_time = self.__arrival_time\r\n for task_id, task in self.__tasks.items():\r\n if task.completion_time == INVALID_TIME:\r\n self.__logger.debug((\"Task %s in request %s missing completion \"\r\n \"time\") % (task_id, self.__id))\r\n return INVALID_TIME_DELTA\r\n task_completion_time = task.adjusted_completion_time()\r\n #if task.scheduler_launch_time > task.node_monitor_launch_time:\r\n #self.__logger.warn((\"Task %s suggests clock skew: scheduler launch time %d, node \"\r\n # \"monitor launch time %d\") %\r\n\r\n #(task_id, task.scheduler_launch_time,\r\n # task.node_monitor_launch_time))\r\n completion_time = max(completion_time, task_completion_time)\r\n return completion_time - self.__arrival_time", "def completion_time(self, url: Optional[str] = None) -> float:\n if not url:\n return max(self.completed_nodes.values())\n return self.completed_nodes[self.url_to_node_map[url]]", "def get_last_time(self):\n \n return self._last", "def completed(self):\n if not self.completion_ts:\n return None\n return datetime.utcfromtimestamp(self.completion_ts)", "def receive_and_probing_time(self):\r\n latest_completion = 0\r\n for probe in self.__probes.values():\r\n\t\t \t if 
probe.complete():\r\n\t\t\t \t\t latest_completion = max(latest_completion, probe.completion_time)\r\n return latest_completion - self.__arrival_time", "def get_mtime(self):\n return os.path.getmtime(self.get_path())", "def estimate_completion(self):\n if self.completion_ts:\n # Task is already complete. Return the exact completion time:\n defer.returnValue(self.completed)\n # Get the timestamps from the descendent task that's doing the work:\n if self.method == 'build' or self.method == 'image':\n subtask_completion = yield self.estimate_descendents()\n defer.returnValue(subtask_completion)\n if self.state == task_states.FREE:\n est_completion = yield self._estimate_free()\n defer.returnValue(est_completion)\n avg_delta = yield self.estimate_duration()\n if avg_delta is None:\n defer.returnValue(None)\n est_completion = self.started + avg_delta\n defer.returnValue(est_completion)", "def completed_on(self):\n return self.get_time(\"completed_on\")", "def completed_on(self):\n return self.get_time(\"completed_on\")", "def _GetUpdateTime(filename):\n stat_info = os.stat(filename)\n return (stat_info.st_atime, stat_info.st_mtime)" ]
[ "0.71924007", "0.71125346", "0.71125346", "0.68504256", "0.6718735", "0.66397405", "0.6231912", "0.6220517", "0.621313", "0.6198457", "0.6186671", "0.6135579", "0.61289394", "0.6083259", "0.6079001", "0.60781735", "0.6078041", "0.6074305", "0.6071143", "0.6070091", "0.6058746", "0.60547954", "0.6025021", "0.60222876", "0.59898025", "0.5982407", "0.59652865", "0.5959967", "0.5959967", "0.59429437" ]
0.73683923
0
Retrieve the last start time for processing a specific path.
def get_start_time(self, file_path) -> datetime | None:
    if file_path in self._processors:
        return self._processors[file_path].start_time
    return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_starttime(self):\n filetime = datetime.datetime.strptime(self.filenametime,\n \"%Y%m%d_%H%M%S\")\n if self.ldat_type != 'acc':\n starttime = filetime\n else:\n starttime = filetime - datetime.timedelta(seconds=512)\n return starttime", "def __get_starting_time(self):\n return self.__starting_time", "def get_last_runtime(self, file_path) -> float | None:\n stat = self._file_stats.get(file_path)\n return stat.last_duration.total_seconds() if stat and stat.last_duration else None", "def get_last_finish_time(self, file_path) -> datetime | None:\n stat = self._file_stats.get(file_path)\n return stat.last_finish_time if stat else None", "def get_start_time(self):\n # Timezone and BST not accounted for. Always gives it as GMT.\n create_time = (os.path.getmtime(self.file_path))\n start_time = create_time - len(self.amplitude) / self.fs\n return datetime.fromtimestamp(start_time)", "def get_start_time(self):\n # Searching through qstat and grabbing only the start time. Lot of weeding out.\n qstat = subprocess.getoutput(\"qstat -j {0}\".format(self.id))\n qstat = qstat[qstat.find(\"start_time\"):]\n qstat = qstat[:qstat.find('\\n')]\n return qstat[28:]", "def start_time(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"start_time\")", "def start_time(self):\n return self._get(\"start_time\")", "def get_last_started_at(self) -> int:\n # XXX: defaults to 1 just to force indexes initialization, by being higher than 0\n return int(self.get_value(self._last_start_attribute) or NULL_LAST_STARTED_AT)", "def getStartTime(self):\n assert not self.isWaitingToStart(), \"Too early to tell: %s\" % self\n return \"%s\" % self.__rawInfo.startTime", "def __last_time(self):\n if self.__stopped is not None:\n return self.__stopped\n return self.__time()", "def last_access_time(path):\r\n return os.stat(path)[stat.ST_ATIME]", "def start_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"start_time\")", "def get_start_time(self):\n return min([m.get_start_time() for m in self._mappers])", "def getStartTime(self):\n raise NotImplementedError", "def getStartTime(self):\n return _osgAnimation.Animation_getStartTime(self)", "def start_time(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"start_time\")", "def start_time(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"start_time\")", "def start_time(self) -> datetime:\n return self.root_hartree.start_time", "def start_time(self):\n return self.time_parser.start_time", "def getStartTime(self):\n assert not self.isWaitingToStart(), \"Too early to tell: %s\" % self\n return \"%s\" % self.__jobInfo.startTime", "def getStartTime(self):\n return _osgAnimation.Channel_getStartTime(self)", "def start_time(self) -> Optional[str]:\n return pulumi.get(self, \"start_time\")", "def start_time(self) -> str:\n return pulumi.get(self, \"start_time\")", "def start_time(self) -> str:\n return pulumi.get(self, \"start_time\")", "def load_last_run_time():\n # path = \"/Users/szou/Downloads/bu/happydogs/analytics_happydogs/last_time_run\"\n if os.path.isfile(\"last_time_run\"): #\n # If the file exists\n f = open(\"last_time_run\", \"r\")\n last_run_time = datetime.datetime.strptime(f.read(), \"%Y-%m-%d %H:%M:%S\")\n f.close()\n return last_run_time\n save_current_run_time()\n # If file doesn't exist (possible if it's the first run), return current time\n return datetime.datetime.now()", "def start_time(self):\n return self.__start", "def start_time(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, 
\"start_time\")", "def start_time(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"start_time\")", "def start_time(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"start_time\")" ]
[ "0.7114907", "0.6834792", "0.67970824", "0.67896086", "0.6649942", "0.658254", "0.65279", "0.650036", "0.6494617", "0.6490718", "0.6469819", "0.64607596", "0.6422892", "0.6396065", "0.6393572", "0.638003", "0.6376569", "0.6376569", "0.63747007", "0.6368677", "0.6352756", "0.63331544", "0.6329823", "0.6329793", "0.6329793", "0.63187766", "0.6318484", "0.6291679", "0.6291679", "0.6291679" ]
0.71356505
0
Return the number of times the given file has been parsed.
def get_run_count(self, file_path) -> int:
    stat = self._file_stats.get(file_path)
    return stat.run_count if stat else 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def line_count(file):\n with open(file, \"r\") as f:\n return sum(1 for line in f)", "def total_number():\r\n total_number = 0\r\n file_read = read_file()\r\n for key in file_read:\r\n total_number = total_number + len(file_read[key])\r\n return total_number", "def fileCounter(directory):", "def number_of_lines(filename=\"\"):\n count = 0\n with open(filename) as f:\n for lines in f:\n count += 1\n return (count)", "def parse_file_count(path, args):\n try:\n fisier = open(path, 'r')\n except IOError:\n print(\"Nu am putut deschide fisierul :\", path)\n return\n n_found = 0\n pattern = args.pattern\n for line in fisier:\n if args.ignore_case:\n line = line.lower()\n pattern = pattern.lower()\n n_found += line.count(pattern)\n\n fisier.close()\n return n_found", "def count_lines(filename):\r\n with open(filename, 'rb') as f:\r\n return sum(1 for line in f)", "def fileCount(self):\n pass", "def get_file_line_count(a_file):\r\n count = -1\r\n try:\r\n for count, line in enumerate(open(a_file, \"rU\")):\r\n pass\r\n except IOError:\r\n pass\r\n count += 1\r\n return count", "def get_file_line_count(a_file):\r\n count = -1\r\n try:\r\n for count, line in enumerate(open(a_file, \"rU\")):\r\n pass\r\n except IOError:\r\n pass\r\n count += 1\r\n return count", "def bpCount(file):\n amount_bp = len(file)\n return amount_bp", "def CountLineNumber(filename):\n\n fp = open(os.path.abspath(filename), \"r\");\n lines = 0\n for line in fp.readlines():\n lines = lines + 1\n fp.close()\n return lines", "def count_lines(filename):\n with open(filename, 'rb') as f:\n return sum(1 for line in f)", "def file_number_of_lines(file_name):\n try:\n item = (0, None)\n with open(file_name) as file_handler:\n for item in enumerate(file_handler):\n pass\n return item[0] + 1\n except IOError:\n return 0", "def numberFiles(self):\n with open(self.inputfile) as fin:\n for n, _ in enumerate(fin, start=1): pass\n self.n = n\n return self.n", "def number_of_lines(filename=\"\"):\n c = 0\n with open(filename) as f:\n for r in f:\n c += 1\n return(c)", "def countsubcatchments(inputfilename=FileSettings.settingsdict['inputfilename']):\r\n global count\r\n with open(inputfilename, 'r') as swmmput:\r\n contents = swmmput.readlines()\r\n count = len(contents)\r\n return(count)", "def number_of_lines(filename=\"\"):\n counter = 0\n with open(filename, \"r\") as my_file:\n for line in my_file:\n counter += 1\n my_file.close()\n return (counter)", "def getFileCount(self) -> int:\n ...", "def find_dimesion(filename):\n file = open(filename,\"r\")\n\n line = file.readline()\n file.close()\n return len(line.split())", "def number_of_lines(filename=\"\"):\n with open(filename, encoding='utf-8') as myFile:\n return sum([1 for line in myFile])", "def line_count(fname):\n return int(call(['wc', '-l', fname]).strip().split()[0])", "def n_file(self):\n self.assert_is_dir_and_exists()\n n = 0\n for _ in self.select_file(recursive=True):\n n += 1\n return n", "def count_number_of_reads(filename: Path) -> int:\n\tif filename.suffix == '.gz':\n\t\tcommand = f\"zcat {filename}\"\n\telse:\n\t\tcommand = f\"cat {filename}\"\n\tprocess = subprocess.Popen(command.split(), stdout = subprocess.PIPE)\n\toutput = subprocess.check_output([\"wc\", \"-l\"], stdin = process.stdout)\n\n\treads = int(output.strip()) / 4\n\treturn int(reads)", "def fileLineCount(fPath):\n\twith open(fPath) as f:\n\t\tfor i, li in enumerate(f):\n\t\t\tpass\n\treturn (i + 1)", "def countSamples(filename):\n with open(filename, \"r\") as f:\n line = f.readline().split(\"\\t\")\n 
return len(line) - 2", "def file_len(filename):\n with open(filename) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def map_count(filename):\n f = open(filename, \"r+\")\n buf = mmap.mmap(f.fileno(), 0)\n lines = 0\n readline = buf.readline\n while readline():\n lines += 1\n return lines", "def file_length(fileName):\n with open(f_pass) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def count_seqs_from_file(fasta_file, parser=parse_fasta):\r\n result = 0\r\n lens = []\r\n for record in parser(fasta_file):\r\n result += 1\r\n lens.append(len(record[1]))\r\n if result == 0:\r\n return result, None, None\r\n else:\r\n return result, mean(lens), std(lens)", "def file_len(file_name):\n with open(file_name) as f:\n for i, l in enumerate(f):\n pass\n return i + 1" ]
[ "0.71401644", "0.69211084", "0.6917881", "0.6879534", "0.68474394", "0.6822997", "0.6816414", "0.68026906", "0.68026906", "0.67917204", "0.6774328", "0.67685753", "0.6762162", "0.67529136", "0.6736762", "0.6731634", "0.67200154", "0.6715869", "0.6663942", "0.6640681", "0.6634842", "0.66270125", "0.662315", "0.6593987", "0.65801877", "0.6578597", "0.6566968", "0.654048", "0.6484713", "0.6481792" ]
0.7443984
0
Return the dag_directory as a string.
def get_dag_directory(self) -> str:
    if isinstance(self._dag_directory, Path):
        return str(self._dag_directory.resolve())
    else:
        return str(self._dag_directory)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n target = self.signal_target\n if target is None:\n target = self.signal_filter.__name__\n return \"Synapse(%s <-> %s)\" % (self.organ_id, target)", "def dag_name(self):\n return self._dag_name", "def __str__(self):\n\t\treturn str(self.graph)", "def __str__(self):\n schedule = \"\"\n\n schedule_list = self.in_order_traversal()\n for node in schedule_list:\n schedule += str(node)\n if node is not schedule_list[-1]:\n schedule += \" \"\n return schedule", "def __str__(self):\n return '{}({})'.format(self.__class__.__name__, dict(self._graph))", "def __str__(self):\n directors = \"\"\n if isinstance(self.director, types.StringTypes):\n directors = self.director\n else:\n directors = ', '.join(self.director)\n\n return \"%s | %s | %s\" % ( self.title, directors, self.rating )", "def __str__(self):\n stringRepresentation = []\n for node in self.getNodes():\n stringRepresentation.append(\"->\".join(\n (str(node), str(self.graph[node]))))\n\n return str(stringRepresentation)", "def __str__(self):\n return super().__str__() + f'Lead Actor: {self.actor}\\nDirector: {self.director}\\n'", "def __str__(self):\n s = ''\n for node in self.nodes:\n s += '\\n\\n'+str(node)+'\\n\\t'\n edges = node.getChildren()\n keys = edges.keys()\n keys.sort()\n for key in keys:\n bounds = edges[key].getSuffix()\n s += str(edges[key])+' '\n for i in xrange(bounds[0], bounds[1]):\n s += self.target[i]\n s += '\\n\\t'\n return s", "def __str__(self):\n string = \"\"\n for i in self.tour_ids:\n string = string + str(i) + \" -> \"\n string += str(self.tour_ids[0])\n return string", "def __str__(self):\n return np.array2string(self.graph.toarray())", "def __str__(self):\n runner = self.__head\n if runner is None:\n return \"\"\n while runner.next_node:\n if runner is not None:\n print(\"{}\".format(runner.data))\n runner = runner.next_node\n return \"{}\".format(runner.data)", "def __str__(self):\n return '<Node%d> TC: %d BP: %d DN: %s\\n' % (self.id,\n self.travelCount,\n self.botPresent,\n repr(self.dirNodes))", "def to_string(self):\r\n return self.command()", "def as_string (self) :\n\n if self.is_machinetag() :\n return \"%s:%s=%s\" % (self.namespace(), self.predicate(), self.value())", "def __str__(self):\n struct_repr = \", \".join([\n \"group: \" + str(self.group),\n \"controls: \" + str(self.controls)\n ])\n\n return f\"ActuatorControlTarget: [{struct_repr}]\"", "def __str__(self):\n\t\treturn str(self.__dStore)", "def get_target(self):\n task = self.task.get_task(self.task_id)\n if 'name' in task:\n return str(task['name'])\n return str(task)", "def __str__(self):\n outstr = [\"\\n<%s: %s>\" % (self.__class__, self.name)]\n outstr.append(\"%d graphs\" % len(self._graphs))\n outstr = \"\\n\".join(outstr)\n return outstr", "def __str__(self):\n s = 'Processor ' + __name__\n # if self._rule_files:\n # s += ' running with rules ' + ' '.join(self._rule_files.values())\n\n return s", "def nameToDagPath(name):\n\n pass", "def __str__(self):\n\n pg_str = pformat(self.content)\n repr_str = \"Node ID: {} \\nNode Name: {} \\n{}\".format(self.id, self.name, pg_str)\n\n return repr_str", "def __str__(self):\n str = \"[{}] ({}) {}\"\n return (str.format(self.__class__.__name__, self.id, self.__dict__))", "def __str__(self):\n if self.connected_to.get_size() != 0:\n s = ''\n for connection in self.connected_to:\n s += f\"{self.name} -> {connection.get_name()} \"\n return s\n return self.name", "def __repr__(self):\n segment_str = str(self.segment)\n shortname_str = str(self.shortname)\n return '{}: 
{}'.format(shortname_str, segment_str)", "def to_string(self):\n return self.name", "def __str__(self):\n info = 'cmd=\"%s\", pid=%d, cwd=%s, alive=%s' % ( self._cmd, \\\n self._pid, \\\n repr(self._cwd), \\\n self.alive)\n return '<%s %s>' % (self.__class__.__name__, info)", "def __repr__(self) -> str:\n context = \" \".join(\"{}={}\".format(k, v) for k, v in self._solution.items())\n return \"<Twilio.Taskrouter.V1.WorkflowInstance {}>\".format(context)", "def __str__(self):\n\n result = \"\"\n\n temp = self.head\n while temp is not None:\n result += str(temp.data) + \" -> \"\n temp = temp.next\n\n return result[0:-4]", "def dag_container_name(self) -> str:\n return (\n \"airflow_\" +\n self.dag_name().replace(\".\", \"_\")\n )" ]
[ "0.6590569", "0.62509996", "0.6173706", "0.6095808", "0.60756105", "0.60463786", "0.6001741", "0.5846653", "0.58369434", "0.5813847", "0.5797878", "0.5797136", "0.5790996", "0.57783884", "0.57694286", "0.5727002", "0.5662419", "0.5660866", "0.5648074", "0.5633061", "0.56296074", "0.56193465", "0.5605722", "0.55982536", "0.55934095", "0.55874914", "0.55817485", "0.5579521", "0.557292", "0.55714357" ]
0.6324174
1
Sleeps until all the processors are done.
def wait_until_finished(self):
    for processor in self._processors.values():
        while not processor.done:
            time.sleep(0.1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait_all_process_done(self) -> None:\n while len(self.process_queue) > 0:\n self.check_process_done()", "def wait_threads(self):\n\n self.queue.join()", "def wait_forever(self):\r\n while True:\r\n time.sleep(0.5)", "def wait(self):\n time.sleep(self.next())", "def wait(self):\r\n self.jobs.join()", "def invoke_all_and_wait(self):\n list_promise = []\n for thread in self.__list_thread:\n thread.start()\n list_promise.append(thread)\n for process in list_promise: process.join()", "def wait_completion(self):\r\n self.tasks.join()", "def startLoop(self):\n while not self.completed:\n self.fillJobQueue()\n self.cleanJobQueue()\n # TODO May want to revisit this:\n # http://stackoverflow.com/questions/29082268/python-time-sleep-vs-event-wait\n # probably when we move to Python 3.\n time.sleep(self.sleepTime)", "def _wait_for_all_operations_done(self):\n while self._test_names_to_processes:\n time.sleep(10)\n running_test_names = list(self._test_names_to_processes.keys())\n for test_name in running_test_names:\n running_proc = self._test_names_to_processes.get(test_name)\n return_code = running_proc.poll()\n if return_code is not None:\n test_case_state = self._test_names_to_test_states.get(test_name)\n self._handle_failure(running_proc, test_case_state.running_test)\n del self._test_names_to_processes[test_name]\n print('Started validating: {}'.format(test_name))\n test_case_state.running_test.validate_result()\n self._run_test(test_case_state.remaining_tests)", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wait_completion(self):\n self.tasks.join()", "def wake_all_threads(self):\n self.advance_time(increment_by=0.0)", "def block_waiting( self ):\n while self.num_waiting > 0:\n time.sleep( 1 )", "def join(self):\n while not self._stop:\n time.sleep(0.1)\n for t in reversed(self.tasks):\n t.join()", "def wait_for_completion(self):\n self.logger.debug(\"Waiting for completion\")\n finished = False\n while not finished:\n if self._all_workers_are_idle():\n self.logger.info(\"Finished\")\n finished = True", "def joiner():\n for th in threads:\n th.join()\n done(process)", "def waitFinish(self):\n while self.job_queue_count > 0:\n sleep(0.5)\n\n # If there was a failure, we don't want to wait for possibly halted threads\n # while performing a 'join'. 
So just exit now with a failure.\n if self.failure:\n sys.exit(1)\n\n self.worker_pool.close()\n self.worker_pool.join()\n self.status_pool.close()\n self.status_pool.join()", "def poll_process_done(self) -> None:\n while len(self.process_queue) >= self.max_processes:\n self.check_process_done()", "def wait_until_all_activity_stops():\n if main_greenlet is None:\n return\n while other_threads_are_active():\n fake_sleep(1)", "def schdule(self):\n while self.queue:\n if self.processing >= self.maxProcessing:\n # We have reached the maximum number of parallel\n # tasks.\n break\n\n item, completeDeferred = self.queue.pop(0)\n\n self.processing += 1 \n self.start(item).addBoth(self.done).chainDeferred(completeDeferred)", "def loop_wait(self):\n # Do in reverse so sockets (first) can send anything the other loops\n # produce\n self.log_debug(\"Waiting for loop to finish\")\n loops = reversed(getattr(self, \"_loops\", []))\n for loop in loops:\n loop.loop_wait()\n self.remove_loop(loop)\n loops = None\n self.loop_confirm_stopped()", "def _proc_collect(self) -> None:\n while True:\n self.process_num_threads.set(self._process.num_threads())\n self.process_memory_bytes.set(self._process.memory_info().rss)\n self.process_cpu_percent.set(self._process.cpu_percent())\n\n sleep(self.process_scrape_interval)", "def _wait_empty(self):\n while True:\n if self.queue.empty():\n # We still have to wait for the last queue item being processed\n # (queue.empty() returns True before queue.task_done() is\n # called).\n self.queue.join()\n return\n time.sleep(1)" ]
[ "0.6963551", "0.6772784", "0.6609773", "0.66002274", "0.6512814", "0.6456526", "0.63595754", "0.6347221", "0.63418", "0.6324502", "0.6324502", "0.6324502", "0.6324502", "0.6324502", "0.6324502", "0.6324502", "0.6324502", "0.6324502", "0.63123554", "0.6303206", "0.62664676", "0.62619126", "0.6254391", "0.62412983", "0.61677784", "0.61600703", "0.6149565", "0.610973", "0.6102147", "0.6067996" ]
0.80391806
0
Collect the result from any finished DAG processors.
def collect_results(self) -> None:
    ready = multiprocessing.connection.wait(
        self.waitables.keys() - [self._direct_scheduler_conn], timeout=0
    )

    for sentinel in ready:
        if sentinel is self._direct_scheduler_conn:
            continue

        processor = cast(DagFileProcessorProcess, self.waitables[sentinel])
        self.waitables.pop(processor.waitable_handle)
        self._processors.pop(processor.file_path)
        self._collect_results_from_processor(processor)

    self.log.debug("%s/%s DAG parsing processes running", len(self._processors), self._parallelism)
    self.log.debug("%s file paths queued for processing", len(self._file_path_queue))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_results(self):\n return self._do_action_under_lock(self._process_results)", "def process(self, results):\n raise NotImplementedError", "def collect(self):\n pass", "def collect(self):\n if self._completion_status is None or not self._completion_status.done:\n raise RuntimeError(\"No reading until done!\")\n self._completion_status = None\n\n yield from self._data", "def return_results(self):\n\n message = 'INFO: entering return_results'\n self.report(message)\n\n # try/except to capture as mnuch as possible (everything that is there even when workflow exits unsuccessfully)\n # capture pk and uuids of last calc, params and remote\n try:\n last_calc_uuid = self.ctx.last_calc.uuid\n last_calc_pk = self.ctx.last_calc.pk\n last_params_uuid = self.ctx.last_params.uuid\n last_params_pk = self.ctx.last_params.pk\n last_remote_uuid = self.ctx.last_remote.uuid\n last_remote_pk = self.ctx.last_remote.pk\n except:\n last_calc_uuid = None\n last_calc_pk = None\n last_params_uuid = None\n last_params_pk = None\n last_remote_uuid = None\n last_remote_pk = None\n\n all_pks = []\n for calc in self.ctx.calcs:\n try:\n all_pks.append(calc.pk)\n except:\n self.ctx.warnings.append(f'cound not get pk of calc {calc}')\n\n # capture links to last parameter, calcualtion and output\n try:\n last_calc_out = self.ctx.kkr.out['output_parameters']\n last_calc_out_dict = last_calc_out.get_dict()\n last_RemoteData = self.ctx.last_remote\n last_InputParameters = self.ctx.last_params\n except:\n last_InputParameters = None\n last_RemoteData = None\n last_calc_out = None\n last_calc_out_dict = {}\n\n # capture convergence info\n try:\n last_rms = self.ctx.rms[-1]\n except:\n last_rms = None\n\n # now collect results saved in results node of workflow\n message = 'INFO: collect outputnode_dict'\n self.report(message)\n outputnode_dict = {}\n outputnode_dict['workflow_name'] = self.__class__.__name__\n outputnode_dict['workflow_version'] = self._workflowversion\n outputnode_dict['material'] = self.ctx.formula\n outputnode_dict['loop_count'] = self.ctx.loop_count\n outputnode_dict['warnings'] = self.ctx.warnings\n outputnode_dict['successful'] = self.ctx.successful\n outputnode_dict['last_params_nodeinfo'] = {'uuid': last_params_uuid, 'pk': last_params_pk}\n outputnode_dict['last_remote_nodeinfo'] = {'uuid': last_remote_uuid, 'pk': last_remote_pk}\n outputnode_dict['last_calc_nodeinfo'] = {'uuid': last_calc_uuid, 'pk': last_calc_pk}\n outputnode_dict['pks_all_calcs'] = all_pks\n outputnode_dict['convergence_value'] = last_rms\n outputnode_dict['convergence_values_all_steps'] = array(self.ctx.rms_all_steps)\n outputnode_dict['convergence_values_last_step'] = array(self.ctx.last_rms_all)\n outputnode_dict['convergence_reached'] = self.ctx.kkr_converged\n outputnode_dict['kkr_step_success'] = self.ctx.kkr_step_success\n outputnode_dict['used_higher_accuracy'] = self.ctx.kkr_higher_accuracy\n\n # report the status\n if self.ctx.successful:\n self.report(\n 'STATUS: Done, the convergence criteria are reached.\\n'\n 'INFO: The charge density of the KKR calculation pk= {} '\n 'converged after {} KKR runs and {} iterations to {} \\n'\n ''.format(\n last_calc_pk, self.ctx.loop_count - 1, sum(self.ctx.KKR_steps_stats.get('isteps', [])),\n self.ctx.last_rms_all[-1]\n )\n )\n else: # Termination ok, but not converged yet...\n self.report(\n 'STATUS/WARNING: Done, the maximum number of runs '\n 'was reached or something failed.\\n INFO: The '\n 'charge density of the KKR calculation pk= '\n 'after {} KKR runs and {} 
iterations is {} \"me/bohr^3\"\\n'\n ''.format(\n self.ctx.loop_count - 1, sum(self.ctx.KKR_steps_stats.get('isteps', [])), self.ctx.last_rms_all[-1]\n )\n )\n\n # create results node and link all calculations\n message = 'INFO: create results nodes'\n self.report(message)\n link_nodes = {}\n icalc = 0\n for calc in self.ctx.calcs:\n link_nodes[f'KkrimpCalc{icalc}'] = calc.outputs.remote_folder\n icalc += 1\n if not self.ctx.dos_run:\n link_nodes['final_imp_potential'] = self.ctx.last_pot\n outputnode_t = create_out_dict_node(Dict(dict=outputnode_dict), **link_nodes)\n outputnode_t.label = 'kkr_scf_wc_results'\n outputnode_t.description = 'Contains results of workflow (e.g. workflow version number, info about success of wf, lis tof warnings that occured during execution, ...)'\n\n self.out('workflow_info', outputnode_t)\n # store out_potential as SingleFileData only if this was no DOS run\n if not self.ctx.dos_run:\n self.out('host_imp_pot', self.ctx.last_pot)\n\n # print results table for overview\n # table layout:\n message = 'INFO: overview of the result:\\n\\n'\n message += '|------|---------|--------|------|--------|---------|-----------------|---------------------------------------------|\\n'\n message += '| irun | success | isteps | imix | mixfac | qbound | rms | pk and uuid |\\n'\n message += '| | | | | | | first | last | |\\n'\n message += '|------|---------|--------|------|--------|---------|--------|--------|---------------------------------------------|\\n'\n KKR_steps_stats = self.ctx.KKR_steps_stats\n for irun in range(len(KKR_steps_stats.get('success', []))):\n message += '|%6i|%9s|%8i|%6i|%.2e|%.3e|%.2e|%.2e|' % (\n irun + 1, KKR_steps_stats.get('success')[irun], KKR_steps_stats.get('isteps')[irun],\n KKR_steps_stats.get('imix')[irun], KKR_steps_stats.get('mixfac')[irun],\n KKR_steps_stats.get('qbound')[irun], KKR_steps_stats.get('first_rms')[irun],\n KKR_steps_stats.get('last_rms')[irun]\n )\n message += f\" {KKR_steps_stats.get('pk')[irun]} | {KKR_steps_stats.get('uuid')[irun]}|\\n\"\n message += '|------|---------|--------|------|--------|---------|-----------------|---------------------------------------------|\\n'\n \"\"\"\n message += \"#|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|\\n\".format(irun+1,\n KKR_steps_stats.get('success')[irun], KKR_steps_stats.get('isteps')[irun],\n KKR_steps_stats.get('imix')[irun], KKR_steps_stats.get('mixfac')[irun],\n KKR_steps_stats.get('qbound')[irun],\n KKR_steps_stats.get('first_rms')[irun], KKR_steps_stats.get('last_rms')[irun])\n \"\"\"\n self.report(message)\n\n # cleanup of unnecessary files after convergence\n # WARNING: THIS DESTROYS CACHABILITY OF THE WORKFLOW!!!\n if self.ctx.do_final_cleanup:\n if self.ctx.successful:\n self.report('INFO: clean output of calcs')\n remove_out_pot_impcalcs(self.ctx.successful, all_pks)\n self.report('INFO: clean up raw_input folders')\n clean_raw_input(self.ctx.successful, all_pks)\n\n # clean intermediate single file data which are not needed after successful run or after DOS run\n if self.ctx.successful or self.ctx.dos_run:\n self.final_cleanup()\n\n self.report('INFO: done with kkr_scf workflow!\\n')", "def completion_processor(self):\n while True:\n _ = (yield)\n self.solve_completed = True", "def do_process(self):\n for k in self.processor.process():\n self._progress = k\n\n self.output_container = self.processor.target_container # type: converter.containers.Container", "def process(self):\n while not self.halted:\n self.step()\n return self.outputs", "def collect_output(self):\n pass", "def 
collect_output(self):\n pass", "def results(self):\n node = self.ctx.children[self.ctx.iteration - 1]\n\n # We check the `is_finished` attribute of the work chain and not the successfulness of the last process\n # because the error handlers in the last iteration can have qualified a \"failed\" process as satisfactory\n # for the outcome of the work chain and so have marked it as `is_finished=True`.\n max_iterations = self.inputs.max_iterations.value # type: ignore[union-attr]\n if not self.ctx.is_finished and self.ctx.iteration >= max_iterations:\n self.report(\n f'reached the maximum number of iterations {max_iterations}: '\n f'last ran {self.ctx.process_name}<{node.pk}>'\n )\n return self.exit_codes.ERROR_MAXIMUM_ITERATIONS_EXCEEDED # pylint: disable=no-member\n\n self.report(f'work chain completed after {self.ctx.iteration} iterations')\n\n # Simply attach the output of the last children\n self.out_many({key: node.outputs[key] for key in node.outputs})\n return None", "def get_results():\n result = self._recv_result() # blocks\n del self._tasks_in_progress[result.task_id]\n del self._task_results_waiting[result.task_id]\n yield result.value", "def get_results():\n self._recv_result() # blocks\n tasks = self._tasks_in_progress\n results = self._task_results_waiting\n\n for task_id in tasks.keys():\n if task_id not in results:\n break\n\n del tasks[task_id]\n result = results.pop(task_id)\n yield result.value", "def collect(self):\n while self.proc is not None:\n self.read()\n if not len(self.datalines):\n return\n while len(self.datalines):\n # pop the first node of list\n yield self.datalines.pop(0)", "def results(self) -> ResultProcessor:\n if self.isAnalysisCompleted():\n return ResultProcessor('input')\n else:\n raise ValueError('Results were not available')", "def _collect_all(self):", "def collect(self):\n while True:\n if not self._queue.empty():\n message = self._queue.get()\n self.working_on = message['job_type']\n else:\n break\n logging.info(\"Popped off message: {}\\n\".format(str(message)))\n\n if message['job_type'] == 'STOP':\n break\n\n if message['job_type'] != 'MAINTAIN' and message['job_type'] != 'UPDATE':\n raise ValueError('{} is not a recognized task type'.format(message['job_type']))\n pass\n\n # Query all repos with repo url of given task\n repoUrlSQL = s.sql.text(\"\"\"\n SELECT min(repo_id) as repo_id FROM repo WHERE repo_git = '{}'\n \"\"\".format(message['given']['github_url']))\n repo_id = int(pd.read_sql(repoUrlSQL, self.db, params={}).iloc[0]['repo_id'])\n\n try:\n if message['models'][0] == 'pull_requests':\n self.pull_requests_model(message, repo_id)\n elif message['models'][0] == 'pull_request_commits':\n self.pull_request_commits_model(message, repo_id)\n elif message['models'][0] == 'pull_request_files':\n self.pull_requests_graphql(message, repo_id)\n except Exception as e:\n register_task_failure(self, message, repo_id, e)\n pass", "def aggregate_results(self):\n\n raise NotImplementedError", "def process_results(self):\n\n while not self.results.empty():\n mvt = self.results.get()\n\n for peer in self.peers_list:\n peer.check_mvt(mvt)\n\n self.results.task_done()", "def collect(self):\n while True:\n if not self._queue.empty():\n message = self._queue.get()\n self.working_on = message['job_type']\n else:\n break\n logging.info(\"Popped off message: {}\\n\".format(str(message)))\n\n if message['job_type'] == 'STOP':\n break\n\n if message['job_type'] != 'MAINTAIN' and message['job_type'] != 'UPDATE':\n raise ValueError('{} is not a recognized task 
type'.format(message['job_type']))\n pass\n\n \"\"\" Query all repos with repo url of given task \"\"\"\n repoUrlSQL = s.sql.text(\"\"\"\n SELECT min(repo_id) as repo_id FROM repo WHERE repo_git = '{}'\n \"\"\".format(message['given']['git_url']))\n repo_id = int(pd.read_sql(repoUrlSQL, self.db, params={}).iloc[0]['repo_id'])\n\n try:\n if message['models'][0] == 'badges':\n self.badges_model(message, repo_id)\n except Exception as e:\n register_task_failure(self, logging, message, repo_id, e)\n pass", "def drain_results_queue(self):\n while len(self._scheduled) > 0:\n self.process_results()", "def collect_all(self):\r\n self.clear()\r\n self._process_lines(self._collect_all())", "def _process(self):\n export_collect_data(self.kwargs[\"collect\"])", "def _proc_collect(self) -> None:\n while True:\n self.process_num_threads.set(self._process.num_threads())\n self.process_memory_bytes.set(self._process.memory_info().rss)\n self.process_cpu_percent.set(self._process.cpu_percent())\n\n sleep(self.process_scrape_interval)", "def aggregate_results(observers):\n return None", "def collect(results, **kwargs):\n l = kwargs.get('logger')\n l.info(\n u'#{} Collect ADD.'.format(u'-' * 8)\n )\n\n l.info(\n u'#{} {} results from {} total items.'.format(\n u'-' * 12, len(results), sum([x['items_processed'] for x in results])\n )\n )\n \n final_result = sum([x['result'] for x in results])\n\n l.info(\n u'#{} Final result: {}.'.format(\n u'-' * 12, final_result\n )\n )\n\n return final_result", "def imap(self, iterable):\n def get_results():\n \"\"\"Get a result from the worker output queue and try to yield\n results back to the caller.\n\n This yields results back in the order of their associated tasks.\n \"\"\"\n self._recv_result() # blocks\n tasks = self._tasks_in_progress\n results = self._task_results_waiting\n\n for task_id in tasks.keys():\n if task_id not in results:\n break\n\n del tasks[task_id]\n result = results.pop(task_id)\n yield result.value\n\n for result in self._map_to_workers(iterable, get_results):\n yield result", "def collect(self):\n raise NotImplementedError(\"abstract\")", "def _extract_completed_runs_from_futures(self) -> None:\n\n # In code check to make sure we don;t exceed resource allocation\n if len(self.futures) > sum(self.client.nthreads().values()):\n warnings.warn(\"More running jobs than resources available \"\n \"Should not have more futures/runs in remote workers \"\n \"than the number of workers. This could mean a worker \"\n \"crashed and was not able to be recovered by dask. \"\n )\n\n # A future is removed to the list of futures as an indication\n # that a worker is available to take in an extra job\n done_futures = [f for f in self.futures if f.done()]\n for future in done_futures:\n self.results.append(future.result())\n self.futures.remove(future)", "def process(self):\n try:\n if not self._successor:\n return self.loading_strategy()\n else:\n return self._successor.process_next(self.loading_strategy())\n except Exception as e:\n Oprint.err(e, 'lmdo')" ]
[ "0.65392697", "0.6166918", "0.6131396", "0.61133945", "0.6108954", "0.6090083", "0.60769624", "0.60753834", "0.6058896", "0.6058896", "0.6026497", "0.60222393", "0.59910214", "0.59865344", "0.59636956", "0.5914493", "0.5898774", "0.5867254", "0.58663267", "0.5846436", "0.58463967", "0.5833686", "0.5783509", "0.5773999", "0.5773452", "0.576", "0.5728865", "0.5639006", "0.5625927", "0.5611847" ]
0.7560404
0
Start more processors if we have enough slots and files to process.
def start_new_processes(self):
    # initialize cache to mutualize calls to Variable.get in DAGs
    # needs to be done before this process is forked to create the DAG parsing processes.
    SecretCache.init()

    while self._parallelism - len(self._processors) > 0 and self._file_path_queue:
        file_path = self._file_path_queue.popleft()
        # Stop creating duplicate processor i.e. processor with the same filepath
        if file_path in self._processors:
            continue
        callback_to_execute_for_file = self._callback_to_execute[file_path]
        processor = self._create_process(
            file_path,
            self._pickle_dags,
            self._dag_ids,
            self.get_dag_directory(),
            callback_to_execute_for_file,
        )
        del self._callback_to_execute[file_path]
        Stats.incr("dag_processing.processes", tags={"file_path": file_path, "action": "start"})
        processor.start()
        self.log.debug("Started a process (PID: %s) to generate tasks for %s", processor.pid, file_path)
        self._processors[file_path] = processor
        self.waitables[processor.waitable_handle] = processor

    Stats.gauge("dag_processing.file_path_queue_size", len(self._file_path_queue))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def schdule(self):\n while self.queue:\n if self.processing >= self.maxProcessing:\n # We have reached the maximum number of parallel\n # tasks.\n break\n\n item, completeDeferred = self.queue.pop(0)\n\n self.processing += 1 \n self.start(item).addBoth(self.done).chainDeferred(completeDeferred)", "def start_processing(self):", "def run_pipelines(self):\n while True:\n # wait until a job is available or end has been signaled\n nodes_assigned = []\n no_more_pipelines = False\n with self.job_list_cv:\n while len(self.job_list) == 0:\n if not self._allow_new_pipelines:\n no_more_pipelines = True\n break\n self.job_list_cv.wait()\n\n if no_more_pipelines:\n self._join_running_pipelines()\n return\n\n # wait until nodes are available or quit has been signaled\n with self.free_cv:\n pipeline = self.job_list.pop_job(self.free_nodes)\n while pipeline is None:\n if not self._process_pipelines:\n break\n self.free_cv.wait()\n pipeline = self.job_list.pop_job(self.free_nodes)\n\n if self._process_pipelines:\n _log.debug(\"starting pipeline %s, free nodes %d -> %d\",\n pipeline.id, self.free_nodes,\n self.free_nodes - pipeline.get_nodes_used())\n self.free_nodes -= pipeline.get_nodes_used()\n\n # Get a list of node names from the allocated nodes and\n # assign it to the pipeline\n for i in range(pipeline.total_nodes):\n nodes_assigned.append(self.allocated_nodes.get())\n _log.debug(\"pipeline {0} allocated nodes {1}\".format(\n pipeline.id, nodes_assigned))\n\n if not self._process_pipelines:\n self._join_running_pipelines()\n return\n\n with self.pipelines_lock:\n pipeline.start(self, nodes_assigned, self.runner)\n self._running_pipelines.add(pipeline)\n if self._status is not None:\n self._status.set_state(pipeline.get_state())\n\n self._join_running_pipelines()", "def split_start(infiles, outfiles):\n\n # split always runs exactly one job (unlike @subdivide)\n # So it implicitly combines all its inputs before running and generating multiple output\n # @originate generates multiple output so the input for @split is a list...\n infile = infiles[0]\n\n # clean up previous\n for f in outfiles:\n os.unlink(f)\n\n\n #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n #\n # Create more files than the previous invocation\n #\n #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n n_to_produce = len(outfiles) + 1\n for i in range(n_to_produce):\n f = '{}{}.split'.format(tempdir, i)\n open(f, 'a').close()", "def _proc_collect(self) -> None:\n while True:\n self.process_num_threads.set(self._process.num_threads())\n self.process_memory_bytes.set(self._process.memory_info().rss)\n self.process_cpu_percent.set(self._process.cpu_percent())\n\n sleep(self.process_scrape_interval)", "def num_processes(self):\n return 1", "def fill(self):\n for _ in range(Pyro4.config.THREADPOOL_MINTHREADS):\n if not self.attemptSpawn():\n break", "def pre_process_resource(self):\n\n # get list of resource files\n files = [os.path.join(self.resource_folder, f) for f in os.listdir(self.resource_folder)]\n files = [f for f in files if os.path.isfile(f)]\n\n # raise error if no files found\n if not files:\n raise FileNotFoundError(\"No resource files found...\")\n\n # create preprocessed folder if it doesn't exist\n if not os.path.exists(self.pre_processed_folder):\n os.mkdir(self.pre_processed_folder)\n\n # record number of chunks from previous files\n chunk_hist = 0\n\n # open valid files\n for filename in files:\n\n self.file_size = os.path.getsize(filename)\n\n # check chunk size\n if self.file_size / self.chunk_size > 
500:\n print(\"Warning, this will create {} partitions.\".format(int(self.file_size / self.chunk_size)))\n rmtree(self.pre_processed_folder)\n raise ValueError(\"This file will create more than 500 partitions, consider increasing the chunk size...\")\n\n # process\n pool = Pool(self.configs[\"n_processors\"])\n for _ in tqdm.tqdm(pool.imap(self.pre_process_chunk, self.generate_chunk(filename)), total=int(self.file_size / self.chunk_size)):\n pass", "def startMP(self):\n for w in self.consumers:\n w.start()", "def incAvailProcSlots(self):\n\n\t\t# Acquire a lock\n\t\tself.lock()\n\n\t\t# Read number of currently executing processes\n\t\tc0 = self.getAvailProcSlots()\n\n\t\tc1 = c0 + 1\n\t\tself.lockfile.seek(0)\n\t\tself.lockfile.write('%04d\\n' % c1)\n\t\tself.lockfile.flush()\n\t\t# Unlock semaphore\n\t\tself.unlock()", "def load_chunk(self, start): # TODO parallelize this whole process\n self.X = queue.Queue()\n n = 0 # number of loaded batches\n print('stop loading : %s' % self.stop_loading())\n print('start + n : %s' % str(start + n))\n while (not self.stop_loading()) and (start + n) < self.size:\n print('load')\n self.X.put(np.load(self.data_filenames[start+n]))\n n += 1\n print('return chunk')\n return n", "def num_processes():\n return 1", "def start_all(self):\n for proc in self.get_all():\n proc.start()", "def _prepare_reads_single_end(self):\n read_files_and_jobs = {}\n with concurrent.futures.ProcessPoolExecutor(\n max_workers=self._args.processes\n ) as executor:\n for lib_name, read_path, processed_read_path in zip(\n self._lib_names,\n self._pathcreator.read_paths,\n self._pathcreator.processed_read_paths,\n ):\n if not self._file_needs_to_be_created(processed_read_path):\n continue\n read_processor = ReadProcessor(\n poly_a_clipping=self._args.poly_a_clipping,\n min_read_length=self._args.min_read_length,\n fastq=self._args.fastq,\n min_phred_score=self._args.min_phred_score,\n adapter=self._args.adapter,\n reverse_complement=self._args.reverse_complement,\n )\n read_files_and_jobs[lib_name] = executor.submit( # run jobs parallel\n read_processor.process_single_end,\n read_path,\n processed_read_path,\n )\n self._evaluet_job_and_generate_stat_file(read_files_and_jobs)", "def startingNewStep(self):\n with self.__queueLock:\n self.__submittedJobs = []", "def can_fit_more(self):\n\n return len(self._requeue_jobs) < MAX_NUM", "def start_chunking(self) -> None:\n self._finished_chunking.clear()\n self._chunks_left = ceil(self.member_count / 1000)", "def start(self):\n self._queue = multiprocessing.Queue(maxsize=self.max_size)\n self._stop_event = multiprocessing.Event()\n\n for _ in range(self.num_workers):\n process = multiprocessing.Process(target=self._parallel_task)\n process.daemon = True\n self._processes.append(process)\n process.start()", "def start(self):\n self._queue = multiprocessing.Queue(maxsize=self.max_size)\n self._stop_event = multiprocessing.Event()\n\n for _ in range(self.num_workers):\n process = multiprocessing.Process(target=self._parallel_task)\n process.daemon = True\n self._processes.append(process)\n process.start()", "def send_jobs(self, call_limit=None):\n for f in self.splitter.split_file():\n # if using a call limit, break once call limit is reached\n if (call_limit is not None) and (self.fnum > call_limit):\n break\n self.maf = f\n self._jobfile()\n # print self.qsub_cmmd\n call(self.qsub_cmmd, shell=True)\n self.fnum += 1", "def parallel(files):\n return list(map(join_process, list(map(start_process, files))))", "def start_workers(self):\n\n for 
thread in self.threads:\n thread.start()", "def startProcessing(self, exportItems):\n super(WiretapShotProcessor, self).startProcessing(exportItems)", "def _spawn_workers(self):\n self._event.set()\n self._workers = [ClassifierWorker(self._event, self._queue, self._results) for x in range(self._NUM_WORKERS)]\n [worker.start() for worker in self._workers]", "def check_process_full(self) -> None:\n if len(self.process_queue) >= self.max_processes:\n task_name, sp = self.process_queue.pop()\n sp.wait()", "def start(self):\n self.register_exit_signals()\n\n set_new_process_group()\n\n self.log.info(\"Processing files using up to %s processes at a time \", self._parallelism)\n self.log.info(\"Process each file at most once every %s seconds\", self._file_process_interval)\n self.log.info(\n \"Checking for new files in %s every %s seconds\", self._dag_directory, self.dag_dir_list_interval\n )\n\n return self._run_parsing_loop()", "def available_processors(self, available_processors):\n\n self._available_processors = available_processors", "def _repopulate_pool(self):\n for i in range(self._processes - len(self._pool)):\n w = self.Process(target=worker,\n args=(self._inqueue, self._outqueue,\n self._initializer,\n self._initargs, self._maxtasksperchild,\n self._wrap_exception,\n self._finalizer,\n self._finalargs)\n )\n self._pool.append(w)\n w.name = w.name.replace('Process', 'PoolWorker')\n w.daemon = True\n w.start()\n util.debug('added worker')", "def process():\n print(OPTS)\n\n # Remove dataset files if they exist from before\n p = PATH.proc\n if p.exists():\n shutil.rmtree(p)\n p.mkdir()\n\n with multiprocessing.Pool(OPTS['cpus']) as pool:\n chunks = [0, 1] if OPTS['dev'] else range(100)\n partition_paths = pool.map(_process, chunks)\n\n fastparquet.writer.merge(partition_paths)", "def _dispatch_new(self):\n\n # This steps ensure that auto-baching works as expected.\n this_batch_duration = time.time() - self.dispatch_timestamp\n self.parallel._backend.batch_completed(self.batch_size,\n this_batch_duration)\n\n # Schedule the next batch of tasks.\n with self.parallel._lock:\n self.parallel.n_completed_tasks += self.batch_size\n self.parallel.print_progress()\n if self.parallel._original_iterator is not None:\n self.parallel.dispatch_next()" ]
[ "0.58189386", "0.5582102", "0.55208045", "0.5505154", "0.54729325", "0.5429797", "0.53998035", "0.53761727", "0.53390145", "0.53227335", "0.53051144", "0.52963483", "0.52729404", "0.5251894", "0.5228932", "0.51991844", "0.5195823", "0.5165046", "0.5165046", "0.51534986", "0.51511043", "0.51319635", "0.5107991", "0.51073617", "0.5104172", "0.50961554", "0.5079776", "0.5077968", "0.50656", "0.506375" ]
0.58833116
0
Kill any file processors that timeout to defend against process hangs.
def _kill_timed_out_processors(self):
    now = timezone.utcnow()
    processors_to_remove = []
    for file_path, processor in self._processors.items():
        duration = now - processor.start_time
        if duration > self._processor_timeout:
            self.log.error(
                "Processor for %s with PID %s started at %s has timed out, killing it.",
                file_path,
                processor.pid,
                processor.start_time.isoformat(),
            )
            Stats.decr("dag_processing.processes", tags={"file_path": file_path, "action": "timeout"})
            Stats.incr("dag_processing.processor_timeouts", tags={"file_path": file_path})
            # Deprecated; may be removed in a future Airflow release.
            Stats.incr("dag_file_processor_timeouts")
            processor.kill()

            # Clean up processor references
            self.waitables.pop(processor.waitable_handle)
            processors_to_remove.append(file_path)

            stat = DagFileStat(
                num_dags=0,
                import_errors=1,
                last_finish_time=now,
                last_duration=duration,
                run_count=self.get_run_count(file_path) + 1,
            )
            self._file_stats[processor.file_path] = stat

    # Clean up `self._processors` after iterating over it
    for proc in processors_to_remove:
        self._processors.pop(proc)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __call__(self):\n for tmp_file in filter(lambda x: x.exists(), self.temp_files):\n tmp_file.unlink()\n\n for proc in self.processes:\n try:\n os.kill(proc, signal.SIGTERM)\n except ProcessLookupError:\n pass", "def _AbortJoin(self, timeout=None):\n for pid, process in self._processes_per_pid.items():\n logger.debug('Waiting for process: {0:s} (PID: {1:d}).'.format(\n process.name, pid))\n process.join(timeout=timeout)\n if not process.is_alive():\n logger.debug('Process {0:s} (PID: {1:d}) stopped.'.format(\n process.name, pid))", "def kill_processes(self):\n for proc in self.processes:\n if proc['proc'].poll() is not None:\n proc['proc'].terminate()", "def terminate(self):\n for processor in self._processors.values():\n Stats.decr(\n \"dag_processing.processes\", tags={\"file_path\": processor.file_path, \"action\": \"terminate\"}\n )\n processor.terminate()", "def _AbortKill(self):\n for pid, process in self._processes_per_pid.items():\n if not process.is_alive():\n continue\n\n logger.warning('Killing process: {0:s} (PID: {1:d}).'.format(\n process.name, pid))\n self._KillProcess(pid)", "def _timeout_cbk(proc):\n proc.kill()\n raise RuntimeError(\"Timeout popped.\")", "def finalize(self):\n for p in self._processes:\n if p.join(30) is None and p.exitcode is None:\n p.kill()", "def kill_manager(self) -> None:\n\n for p in self.process_list:\n p.terminate()\n # NOTE: Seems Python does not appreciate if close is called too quickly.\n sleep(0.5)\n # Release the resources held by the Proess (Python 3.7 and up)\n p.close()", "def cleanup():\n for th in THREAD_REGISTER.values():\n th.exit()\n th.join(timeout=3)", "def kill_process_family(pid, exit_code=None, timeout=None):\n\n if timeout is not None:\n end_time = time.time() + timeout\n else:\n end_time = None\n while True:\n children = get_child_pids(pid)\n if not children:\n break\n if end_time is not None and time.time() >= end_time:\n raise TimeoutError(\"Unable to kill child processes.\")\n for child in children:\n kill_process_family(child, exit_code)\n kill_process(pid, exit_code)", "def abort(self):\n if self.processes is None:\n return\n\n for p in self.processes:\n if p.poll() is None:\n p.terminate()\n try:\n p.wait(timeout=2)\n except subprocess.TimeoutExpired:\n p.kill()\n # Don't catch the TimeoutExpired exception as\n # wait should return immediately after the process\n # was killed. 
If this wait times out just let\n # the exception terminate the execution as\n # something has serriously gone wrong if the\\\n # process is still running.\n p.wait(timeout=5)", "def _kill_running_processes(self):\n # Kill any rouge processes that are still running.\n with _thread_lock:\n killed = []\n for pid in self._pids:\n try:\n os.kill(pid, _KILLED_BY_ANYPYTOOLS)\n killed.append(str(pid))\n except:\n pass\n self._pids.clear()", "def force_stop(self):\n\n # Stopping thread\n self.quit()\n\n # Killing all running processes\n ProcessManager(self.cf_process).close_all_child()\n ProcessManager(self.server_process).close_all_child()", "def _kill_launchfile(self):\r\n if len(self.process_list) is 0:\r\n print(\"[ToyCarEpisodeMonitor._terminate()]: no process to terminate\")\r\n else:\r\n for p in self.process_list:\r\n p.send_signal(signal.SIGINT)\r\n while p.poll() is None:\r\n print (\r\n \"[SimulatorEpisodeMonitor._terminate()]: \"\r\n \"simulator process {} termination in progress...\"\r\n ).format(p.pid)\r\n time.sleep(1.0)\r\n print (\r\n \"[ToyCarEpisodeMonitor._terminate()]: \"\r\n \"simulator proc {} terminated with exit code {}\"\r\n ).format(p.pid, p.returncode)\r\n self.process_list = []\r\n print(\"[ToyCarEpisodeMonitor]: termination done!\")\r\n\r\n return", "def kill_all():\n compose_kill_all()", "def killJobs(self):\n self.worker_pool.close()\n self.status_pool.close()\n self.failure = True\n for job in self.active:\n try:\n job.killJob()\n except AttributeError:\n raise SchedulerError('killJob method is not defined')\n except: # Job already terminated\n pass\n self.job_queue_count = 0", "def timer_ffmpeg_process_timeout():\n try:\n if not self.ffmpeg_process_ps.is_alive():\n timer_ffmpeg_process.stop()\n self.w.hide()\n del (self.w)\n self.ffmpeg_process_ps = None\n except:\n pass", "def kill(self):\n self.proc.kill()\n self.proc.wait()\n self.thread.join()", "def shutdown(self):\n for i in range(len(self.processes)):\n self.processes[i].terminate()\n self.processes[i].join()\n self.inner.readers[i].close()\n self.inner.writers[i].close()\n self.chunk_process.terminate()\n self.chunk_process.join()", "def test_stopProcessForcedKill(self):\r\n self.pm.startService()\r\n self.pm.addProcess(\"foo\", [\"foo\"])\r\n self.assertIn(\"foo\", self.pm.protocols)\r\n self.reactor.advance(self.pm.threshold)\r\n proc = self.pm.protocols[\"foo\"].transport\r\n # Arrange for the fake process to live longer than the killTime\r\n proc._terminationDelay = self.pm.killTime + 1\r\n self.pm.stopProcess(\"foo\")\r\n # If process doesn't die before the killTime, procmon should\r\n # terminate it\r\n self.reactor.advance(self.pm.killTime - 1)\r\n self.assertEqual(0.0, self.pm.timeStarted[\"foo\"])\r\n\r\n self.reactor.advance(1)\r\n # We expect it to be immediately restarted\r\n self.assertEqual(self.reactor.seconds(), self.pm.timeStarted[\"foo\"])", "def cleanup():\n dist.destroy_process_group()", "def kill_children(timeout=1) -> List[psutil.Process]:\n procs = child_manager.children_pop_all()\n for p in procs:\n try:\n p.terminate()\n except psutil.NoSuchProcess:\n pass\n gone, alive = psutil.wait_procs(procs, timeout=timeout)\n for p in alive:\n logger.warning(\"Cleaning up child: %d\", p.pid)\n p.kill()\n return alive", "def kill(self):\n self._stop_proc(signal.SIGKILL)", "def cleanup_manager(self) -> None:\n \n for p in self.process_list:\n if p.is_alive():\n p.terminate()\n sleep(1)\n p.close()", "def kill(self):\n\n #Kill relevant process names\n if self.driver_type != 'firefox_wdm':\n 
os.system('pkill -f chrome')\n os.system('pkill -f Chrome')\n os.system('pkill -f chromedriver')\n else:\n os.system('pkill -f FireFox')\n #TODO: confirm this -> os.system('pkill -f geckodriver')", "def _AbortTerminate(self):\n for pid, process in self._processes_per_pid.items():\n if not process.is_alive():\n continue\n\n logger.warning('Terminating process: {0:s} (PID: {1:d}).'.format(\n process.name, pid))\n process.terminate()", "def kill():\n Log.info(\"Kill tns processes.\")\n if Settings.HOST_OS == OSType.WINDOWS:\n Process.kill(proc_name='node')\n else:\n Process.kill(proc_name='node', proc_cmdline=Settings.Executables.TNS)\n Process.kill_by_commandline(cmdline='webpack.js')", "def _stopProcessSet(procSet):\n # Send a SIGTERM to all (still running) processes.\n finished = {}\n needToWait = False\n for i, p in enumerate(procSet):\n if p.poll() is not None:\n finished[p] = None\n continue\n\n needToWait = True\n try:\n if platformType == \"windows\":\n win32process.TerminateProcess(p._handle, 0)\n else:\n os.kill(p.pid, signal.SIGTERM)\n if i == 0:\n children = getattr(p, \"children\", [])\n for cpid in children:\n os.kill(cpid, signal.SIGTERM)\n except OSError:\n # This can happen if the process has died before the call to kill, so\n # we ignore it.\n pass\n\n if needToWait:\n # At least one process has been signalled, so wait for about\n # _stopProcessTimeout * 0.1 seconds or until all the processes have\n # died.\n for i in range(_stopProcessTimeout):\n done = True\n for p in procSet:\n # print(\">>\", p.poll())\n if p.poll() is not None:\n finished[p] = None\n continue\n done = False\n\n if done:\n break\n else:\n time.sleep(0.1)\n\n # Now use SIGKILL on any processes still running.\n for p in procSet:\n if p not in finished:\n try:\n if platformType == \"windows\":\n win32process.TerminateProcess(p._handle, 0)\n else:\n os.kill(p.pid, signal.SIGKILL)\n except OSError:\n # Process may have died before the call to kill.\n pass\n\n # Wait again for all the processes to die. If they do not then\n # something really horrid has happened.\n for i in range(_stopProcessTimeout):\n done = True\n for p in procSet:\n if p.poll() is not None:\n finished[p] = None\n continue\n done = False\n\n if done:\n break\n else:\n time.sleep(0.1)\n\n for p in procSet:\n if p.poll() is None:\n print(\"Heck! Could not stop process with ID = %d\" % p.pid)\n\n # Clear the list of processes.\n procSet[:] = []", "def kill(self):\n self.running = False\n\n if self.pool is not None:\n self.pool.close()\n# self.pool.terminate()\n# self.pool.join()\n self.pool = None\n\n del self.thread\n\n for i in xrange(len(self.tasks)):\n if (self.tasks[i].status == 'queued' or\n self.tasks[i].status == 'processing'):\n self.tasks[i].set_killed()", "def kill(self, timeout: float=1.0) -> None:\n try:\n assert(self._pid is not None)\n assert(self._thread is not None)\n os.kill(self._pid, signal.SIGKILL)\n self._thread.join(timeout=timeout)\n except OSError as err:\n # print(str(err), file=sys.stderr)\n pass" ]
[ "0.64615154", "0.63152456", "0.62957335", "0.60748655", "0.6066558", "0.60211766", "0.5990371", "0.5971625", "0.5948777", "0.5891773", "0.58864486", "0.5882023", "0.58638203", "0.5838313", "0.577172", "0.5762323", "0.5740598", "0.57361495", "0.5653952", "0.564816", "0.5636479", "0.5635712", "0.56323755", "0.56248605", "0.56227183", "0.5620034", "0.56038743", "0.5586878", "0.5570125", "0.5566974" ]
0.816324
0
Add stuff to the back or front of the file queue, unless it's already present.
def _add_paths_to_queue(self, file_paths_to_enqueue: list[str], add_at_front: bool):
    new_file_paths = list(p for p in file_paths_to_enqueue if p not in self._file_path_queue)

    if add_at_front:
        self._file_path_queue.extendleft(new_file_paths)
    else:
        self._file_path_queue.extend(new_file_paths)

    Stats.gauge("dag_processing.file_path_queue_size", len(self._file_path_queue))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enqueue_front(self, item):\n self._items.insert(0, item)", "def bypass_queue(self, name):\n # self.queue = [name] + self.queue\n # self.queue.insert(0, name)\n\n # self.lst = [name] + self.lst # This person is brought to the front of the queue\n self.lst.insert(0, name) #Not constant time as the pointer is moved for all the members of the queue, 0(n)\n print(f\"{name} has bypassed the queue\")", "def add(self, data):\n wasquiet = True if (self.tail == self.curr) else False\n\n # Assert the queue is clean\n qtail = self.base + \".\" + str(self.tail)\n print \"creating %s\" % qtail\n assert not os.path.exists(qtail)\n qt = open(qtail, \"w\")\n qt.write(data)\n qt.close()\n\n # Where does the next item go\n self.tail += 1\n self._settail(self.tail)\n\n return wasquiet", "def add_existing_subdir_to_queue(cls):\n\n if FolderQueue.is_empty():\n for dirpath, _, _ in os.walk(top=cls.path, topdown=True):\n FolderQueue.fila.push(dirpath)", "def _producer(self) -> None:\n while (gtex_path := self.gtex.pop(0)) is not None and (\n bm_path := self.bm.pop(0)\n ) is not None:\n data = merge_data(gtex_path, bm_path, self.mane)\n self._q.put(data)\n logger.info(f\"Contents of file {gtex_path} added to queue\")\n else:\n self._q.put(None) # Send end signal to consumer\n logger.info(\"All files added. None signal sent. Producer returns\")\n return", "def _add_to_queue(self, tok):\n if self._genpostfix:\n self._queue.append(tok)", "def add_prev(self, item, index):\n if index in self.d_buffer.keys():\n return\n if len(self) == self._size:\n self.pop_last()\n self.add_item(item, index)", "def _add_to_chat_queue(self, message):\n self.chat_message_queue.appendleft(message)", "def enqueue(self, item):\n\t\tself.items.insert(0, item)", "def on_queue_next_command(self, event, index):\n self.pre_check(event)\n self.same_channel_check(event)\n if 1 < index <= len(self.get_player(event.guild.id).queue):\n index -= 1\n self.get_player(event.guild.id).queue.insert(\n 0,\n self.get_player(event.guild.id).queue.pop(index),\n )\n ytdata = self.get_ytdl_values(\n self.get_player(event.guild.id).queue[0].metadata,\n )\n api_loop(\n event.channel.send_message,\n \"Moved ``{}`` to the front of the queue.\".format(\n ytdata[\"title\"],\n ytdata[\"uploader\"],\n ytdata[\"time_formated\"],\n ytdata[\"source\"],\n ),\n )\n else:\n api_loop(event.channel.send_message, \"Invalid index input.\")", "def queue(self, irc, msg, args, notice):\n pos = self._find_in_queue(msg.nick)\n QUEUE_SLOTS = self.registryValue('queueSlots')\n if pos < 0:\n if QUEUE_SLOTS >= 0 and self._count >= QUEUE_SLOTS:\n irc.reply(\"Sorry, but the queue is out of slots\")\n return\n self._queue.append((msg.nick, notice))\n self._count += 1\n irc.reply(\"I queued you at position %s in the queue\" % len(self._queue))\n self._dump_queue()\n elif self._queue[pos][1] != notice:\n self._queue[pos] = (msg.nick, notice)\n irc.reply(\"You're queued at position %s already, I've updated \"\\\n \"notice to '%s'\" % (pos + 1, notice))\n self._dump_queue()\n else:\n irc.reply(\"You're already in the queue at position %s.\" % (pos+1))", "def enqueue(self, item):\n self.items.insert(0, item)", "def enqueue(self, item):\n self.items.insert(0, item)", "def enqueue_content(queue, p_title, content, image):\n newp = ContentInfo(p_title, content, image, None)\n if queue.front is None:\n queue.front = newp\n else:\n queue.back.rest = newp\n queue.back = newp", "def add_queue(self, queue):\n with self.mutex:\n self.queues.append(queue)", "def push(self, item):\n 
super().add_item_to_front(item)", "def bypass_queue(self, name):\n self.deque.appendleft(name) #Not constant time as the pointer is moved for all the members of the queue, 0(n)\n print(f\"{name} has bypassed the deque\")", "def add_to_queue(self, msg):\n if not self.queue.full():\n self.queue.put(msg)", "def __add_recent_file(self, fname):\r\n if fname is None:\r\n return\r\n if not fname in self.recent_files:\r\n self.recent_files.insert(0, fname)\r\n if len(self.recent_files) > 9:\r\n self.recent_files.pop(-1)", "def add_front(self, item):\n\n self.items.insert(0, item)", "def add(self, element):\n\n if self.style == 'FIFO': # If FIFO, append element to end of list\n self.queue.append(element)\n\n elif self.style == 'LIFO': # If LIFO, append element to front of list\n self.queue.insert(0, element)", "def enqueue(self, item):\n self.__queue.insert(0, item)", "def enqueue(self, item):\n self.queue.append(item)", "def enqueue(self, item):\n self.queue.append(item)", "def queue_append(self, obj, value):\n self.queue.append((obj, value))\n if len(self.queue) > self.queue_size:\n self.dump_queue()", "def dequeue_content(queue):\n if queue.front is None:\n raise IndexError\n queue.front = queue.front.rest\n if queue.front is None:\n queue.back = None", "async def queue(self, msg, song):\n title1 = await Downloader.get_info(self, url=song)\n title = title1[0]\n data = title1[1]\n # NOTE:needs fix here\n if data['queue']:\n await self.playlist(data, msg)\n # NOTE: needs to be embeded to make it better output\n return await msg.send(f\"Added playlist {data['title']} to queue\")\n self.player[msg.guild.id]['queue'].append(\n {'title': title, 'author': msg})\n return await msg.send(f\"**{title} added to queue**\".title())", "def enqueue_rear(self, item):\n self._items.append(item)", "def add(self, item: T) -> None:\n self._queue.append(item)\n if not self.is_empty():\n self._queue.sort(reverse=True)", "def _enqueue(self, page):\n self._queue.append(page)" ]
[ "0.63460886", "0.6313469", "0.6261927", "0.6121886", "0.60842025", "0.60755575", "0.6025526", "0.600535", "0.59981894", "0.5954292", "0.5923852", "0.59160674", "0.59160674", "0.59157324", "0.5904151", "0.5899629", "0.58940136", "0.5866215", "0.58652383", "0.58476686", "0.5843139", "0.5826011", "0.57984835", "0.57984835", "0.5797406", "0.57732034", "0.57283795", "0.57088107", "0.5699939", "0.56878567" ]
0.65448594
0
Stop all running processors.
def terminate(self):
    for processor in self._processors.values():
        Stats.decr(
            "dag_processing.processes", tags={"file_path": processor.file_path, "action": "terminate"}
        )
        processor.terminate()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stop(self):\n for process in self.process:\n process.stop()", "def stop_all():\n\twhile _running:\n\t\t_running[0].stop(noerror=True)", "def stop_all_instances(self):\n print '# Stopping all the instances'\n number = self.compute.stop_all_instances()\n print '%d instances were stopped' % number", "def stop(self):\n for module in self.asynchronous:\n module.stop()", "def _stop_all(self):\n # LEDs\n self.cam_led.off\n self.analysis_led[0].off\n self.analysis_led[1].off\n self.error.off\n \n # motors\n self.motor.stop()\n self.wash.stop()", "def stop(self):\n for task in self._tasks:\n task.stop()", "def stopworkerthreads():\n global PROCESSES\n for proc in PROCESSES:\n proc.stop()\n proc.join()", "def stop (self):\n for i in xrange(self.numpools):\n numworkers = self.numworkerslist[i]\n for j in xrange(numworkers):\n self.queues[i].put('__STOP__')", "def stop_all():\n global exporter_objects\n logging.info(\"Starting shutdown of all threads.\")\n for _task in autorx.task_list.keys():\n try:\n autorx.task_list[_task]['task'].stop()\n except Exception as e:\n logging.error(\"Error stopping task - %s\" % str(e))\n\n for _exporter in exporter_objects:\n try:\n _exporter.close()\n except Exception as e:\n logging.error(\"Error stopping exporter - %s\" % str(e))", "def kill_all(self) -> None:\n for i in self.ist:\n i.stop_stream()\n i.close()\n for o in self.ost:\n o.stop_stream()\n o.close()", "def kill_processes(self):\n for proc in self.processes:\n if proc['proc'].poll() is not None:\n proc['proc'].terminate()", "def stopworkerthreads():\n for proc in PROCESSES:\n print 'found worker'\n proc.stop()\n for cvproc in CVPROCESSES:\n print 'opencv found worker'\n cvproc.stop()", "def terminate_all(self):\n self._stop_all('terminate')", "def kill_all(self):\n self._stop_all('kill')", "def stopProcesses(*args):\n _stopProcessSet(_running)", "def stop(self):\n for worker in self.workers:\n import sys; sys.stdout.flush()\n try: worker.exec_code('import sys;sys.exit(0)')\n except:\n #should really do something here to\n # trap non-SystemExit errors.\n pass", "def stop(self) -> None:\n for instance in self.instances:\n instance.listener = None\n instance.stop()", "def stopall(self):\n\n for i in self.bots:\n try:\n i.stop()\n except:\n pass", "def stop_all(self, signal, frame):\n for event in self.event_list:\n event.set()\n for process in self.process_list:\n process.join()\n sys.exit()", "def stop(self, *args):\n if args[0] == 'all':\n for k, v in self.processers.items():\n if v:\n try:\n v.terminate()\n except:\n pass\n print 'Killed %s.' % k\n\n self.processers = dict.fromkeys(self.processers.keys())\n else:\n seq = args[0]\n try:\n self.processers['process%s' % seq].terminate()\n self.processers['process%s' % seq] = None\n print 'Killed process%s.' % seq\n except:\n print 'Have no process%s.' 
% seq", "def _terminateAll(self):\n\n # Termination of all processes\n try :\n for process in self.processes:\n process.terminate()\n except AttributeError:\n pass\n\n return", "def stop(self):\n if self._is_running():\n self._stop_event.set()\n\n for process in self._processes:\n if process.is_alive():\n os.kill(process.pid, signal.SIGINT)\n process.join()\n\n if self._queue is not None:\n self._queue.close()\n\n self._queue = None\n self._stop_event = None\n self._processes = []", "def stop(self):\n if self._is_running():\n self._stop_event.set()\n\n for process in self._processes:\n if process.is_alive():\n os.kill(process.pid, signal.SIGINT)\n process.join()\n\n if self._queue is not None:\n self._queue.close()\n\n self._queue = None\n self._stop_event = None\n self._processes = []", "def stop_all_nodes(self):\n for node in self.nodes:\n if node.running():\n node.stop()", "def stop(self):\r\n for srv in self._servers:\r\n srv.stop()", "def restartAll(self):\n for name in self.processes:\n self.stopProcess(name)", "def mux_stopall(): \r\n # Map this close to all existing multiplexers\r\n for (key, mux) in MULTIPLEXER_OBJECTS.items():\r\n mux.close()\r\n del MULTIPLEXER_OBJECTS[key]\r\n \r\n # Stop all underlying waitforconns\r\n for key in MULTIPLEXER_WAIT_HANDLES.keys():\r\n # Map stopcomm to each key\r\n mux_stopcomm(key)\r\n \r\n # Remove all the wait functions\r\n for key in MULTIPLEXER_WAIT_FUNCTIONS.keys():\r\n mux_virtual_stopcomm(key)", "def shutdown(self) -> None:\n for worker in self.remote_workers:\n worker.shutdown.remote()\n worker.__ray_terminate__.remote()", "def stop(self):\n if not self.is_started:\n raise RuntimeError(\"Cannot call stop() before start()\")\n\n for pid in self._processes.keys():\n self._kill_process(pid)\n\n self._processes = None\n self._task_queue = None\n self._result_queue = None\n self._task_registry = None\n self._tasks_in_progress = None\n self._task_results_waiting = None", "def killall(self):\n\n for job_id, job in self.jobs:\n backend.kill( job )" ]
[ "0.8106002", "0.77567285", "0.7552621", "0.75202745", "0.7439674", "0.7399022", "0.7375506", "0.7342175", "0.733424", "0.73039013", "0.7276026", "0.7195019", "0.7175661", "0.7161969", "0.7149045", "0.7115749", "0.7114294", "0.70981437", "0.70830184", "0.70610476", "0.7045887", "0.7041499", "0.7041499", "0.7030804", "0.70080596", "0.7006033", "0.700223", "0.6993868", "0.69907343", "0.6926978" ]
0.7778282
1
Emit metrics about dag parsing summary. This is called once every time around the parsing "loop" i.e. after all files have been parsed.
def emit_metrics(self):
    parse_time = time.perf_counter() - self._parsing_start_time
    Stats.gauge("dag_processing.total_parse_time", parse_time)
    Stats.gauge("dagbag_size", sum(stat.num_dags for stat in self._file_stats.values()))
    Stats.gauge(
        "dag_processing.import_errors", sum(stat.import_errors for stat in self._file_stats.values())
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _log_file_processing_stats(self, known_file_paths):\n # File Path: Path to the file containing the DAG definition\n # PID: PID associated with the process that's processing the file. May\n # be empty.\n # Runtime: If the process is currently running, how long it's been\n # running for in seconds.\n # Last Runtime: If the process ran before, how long did it take to\n # finish in seconds\n # Last Run: When the file finished processing in the previous run.\n headers = [\"File Path\", \"PID\", \"Runtime\", \"# DAGs\", \"# Errors\", \"Last Runtime\", \"Last Run\"]\n\n rows = []\n now = timezone.utcnow()\n for file_path in known_file_paths:\n last_runtime = self.get_last_runtime(file_path)\n num_dags = self.get_last_dag_count(file_path)\n num_errors = self.get_last_error_count(file_path)\n file_name = os.path.basename(file_path)\n file_name = os.path.splitext(file_name)[0].replace(os.sep, \".\")\n\n processor_pid = self.get_pid(file_path)\n processor_start_time = self.get_start_time(file_path)\n runtime = (now - processor_start_time) if processor_start_time else None\n last_run = self.get_last_finish_time(file_path)\n if last_run:\n seconds_ago = (now - last_run).total_seconds()\n Stats.gauge(f\"dag_processing.last_run.seconds_ago.{file_name}\", seconds_ago)\n\n rows.append((file_path, processor_pid, runtime, num_dags, num_errors, last_runtime, last_run))\n\n # Sort by longest last runtime. (Can't sort None values in python3)\n rows.sort(key=lambda x: x[3] or 0.0)\n\n formatted_rows = []\n for file_path, pid, runtime, num_dags, num_errors, last_runtime, last_run in rows:\n formatted_rows.append(\n (\n file_path,\n pid,\n f\"{runtime.total_seconds():.2f}s\" if runtime else None,\n num_dags,\n num_errors,\n f\"{last_runtime:.2f}s\" if last_runtime else None,\n last_run.strftime(\"%Y-%m-%dT%H:%M:%S\") if last_run else None,\n )\n )\n log_str = (\n \"\\n\"\n + \"=\" * 80\n + \"\\n\"\n + \"DAG File Processing Stats\\n\\n\"\n + tabulate(formatted_rows, headers=headers)\n + \"\\n\"\n + \"=\" * 80\n )\n\n self.log.info(log_str)", "def get_graph_summary(self):\n\n pass", "def _printSummary(self):\n\t\t### COP OUT\n\t\tif self.params['background'] is True:\n\t\t\tself.stats['count'] += 1\n\t\t\treturn\n\n\t\t### THIS NEEDS TO BECOME MUCH MORE GENERAL, e.g. 
Peaks\n\t\ttdiff = time.time()-self.stats['startseries']\n\t\tif not self.params['continue'] or tdiff > 0.1:\n\t\t\tcount = self.stats['count']\n\t\t\t#if(count != self.stats['lastcount']):\n\t\t\tsys.stderr.write(\"\\n\\tSUMMARY: \"+self.functionname+\"\\n\")\n\t\t\tself._printLine()\n\t\t\tsys.stderr.write(\"\\tTIME: \\t\"+apDisplay.timeString(tdiff)+\"\\n\")\n\t\t\tself.stats['timesum'] = self.stats['timesum'] + tdiff\n\t\t\tself.stats['timesumsq'] = self.stats['timesumsq'] + (tdiff**2)\n\t\t\ttimesum = self.stats['timesum']\n\t\t\ttimesumsq = self.stats['timesumsq']\n\t\t\tif(count > 1):\n\t\t\t\ttimeavg = float(timesum)/float(count)\n\t\t\t\ttimestdev = math.sqrt(float(count*timesumsq - timesum**2) / float(count*(count-1)))\n\t\t\t\ttimeremain = (float(timeavg)+float(timestdev))*self.stats['seriesleft']\n\t\t\t\tsys.stderr.write(\"\\tAVG TIME: \\t\"+apDisplay.timeString(timeavg,timestdev)+\"\\n\")\n\t\t\t\t#print \"\\t(- TOTAL:\",apDisplay.timeString(timesum),\" -)\"\n\t\t\t\tif(self.stats['seriesleft'] > 0):\n\t\t\t\t\tsys.stderr.write(\"\\t(- REMAINING TIME: \"+apDisplay.timeString(timeremain)+\" for \"\n\t\t\t\t\t\t+str(self.stats['seriesleft'])+\" series -)\\n\")\n\t\t\t#print \"\\tMEM: \",(mem.active()-startmem)/1024,\"M (\",(mem.active()-startmem)/(1024*count),\"M)\"\n\t\t\tself.stats['count'] += 1\n\t\t\tself._printLine()", "def run_all(self):\n self.formatter.section_start('Scratch Memory Info')\n self.formatter.section_start('Per priority')\n self.analyse_per_priority()\n self.formatter.section_end()\n self.formatter.section_start('Per task')\n self.analyse_per_task()\n self.formatter.section_end()\n self.formatter.section_end()", "def summary(self):\n\t\tself.writer = tf.summary.FileWriter(\n\t\t\t\t'./graphs/AttNCF', tf.get_default_graph())\n\t\twith tf.name_scope(\"summaries\"):\n\t\t\ttf.summary.scalar('loss', self.loss)\n\t\t\tself.summary_op = tf.summary.merge_all()", "def summarize(self):\n # NOTE: should be moved to abstract superclass\n failcount = len(self.mosaictrees) - len(self)\n msg = \"Parsed %i mosaics from the FluoView project.\\n\\n\" % len(self)\n if failcount > 0:\n msg += (\n \"\\n==== WARNING ====== WARNING ====\\n\\n\"\n \"Parsing failed on %i mosaic(s). Missing files?\\n \"\n \"\\n==== WARNING ====== WARNING ====\\n\\n\\n\" % failcount\n )\n for mos in self:\n msg += \"Mosaic %i: \" % mos.supplement[\"index\"]\n msg += \"%i x %i tiles, \" % (mos.dim[\"X\"], mos.dim[\"Y\"])\n msg += \"%.1f%% overlap.\\n\" % mos.get_overlap()\n return msg", "def summarize(self):\n # NOTE: should be moved to abstract superclass\n failcount = len(self.mosaictrees) - len(self)\n msg = \"Parsed %i mosaics from the FluoView project.\\n\\n\" % len(self)\n if failcount > 0:\n msg += (\n \"\\n==== WARNING ====== WARNING ====\\n\\n\"\n \"Parsing failed on %i mosaic(s). 
Missing files?\\n \"\n \"\\n==== WARNING ====== WARNING ====\\n\\n\\n\" % failcount\n )\n for mos in self:\n msg += \"Mosaic %i: \" % mos.supplement[\"index\"]\n msg += \"%i x %i tiles, \" % (mos.dim[\"X\"], mos.dim[\"Y\"])\n msg += \"%.1f%% overlap.\\n\" % mos.get_overlap()\n return msg", "def print_summary(self):\n self.network.print_summary()", "def add_summary(self):\n self.merged = tf.summary.merge_all()\n print(self.merged)\n self.file_writer = tf.summary.FileWriter(self.config.graph_output, self.sess.graph)", "def run_report(self) -> None:\n t1 = self.t1 or time.time()\n\n dt = t1 - self.t0\n\n if dt and self.max_tasks:\n speed = len(self.statistics) / dt / self.max_tasks\n else:\n speed = 0\n\n LOGGER.info('CRAWLER STATISTICS REPORT')\n\n show = list(self.statistics)\n show.sort(key=lambda stat: str(stat.url))\n\n for stat in show:\n self.log_url_metadata(stat)\n\n LOGGER.info(\n f'Completed parsing {len(self.statistics)} urls in {dt} secs; (max_tasks={self.max_tasks}) ({speed} urls per second per task)', # pylint: disable=C0301 # noqa: E501\n )\n\n LOGGER.info(f'Remaining: {self.queue.qsize()}')\n LOGGER.info(f'Total Statistics: {len(self.statistics)}')\n LOGGER.info(f'Datetime: {time.ctime()} local time')", "def summarize(self):\n info(\"Running \" + self.title + \" generator\")", "def report(self):\n\n job_summary = {}\n for job in self._jobs:\n \n if job.step_name not in job_summary:\n job_summary[ job.step_name ] = {}\n job_summary[ job.step_name ][ 'DONE' ] = 0\n job_summary[ job.step_name ][ 'RUNNING' ] = 0\n job_summary[ job.step_name ][ 'QUEUING' ] = 0\n job_summary[ job.step_name ][ 'FAILED' ] = 0\n job_summary[ job.step_name ][ 'UNKNOWN' ] = 0\n job_summary[ job.step_name ][ 'max_mem' ] = 0\n job_summary[ job.step_name ][ 'cputime' ] = 0\n\n if job.status == Job_status.FINISHED:\n job_summary[ job.step_name ][ 'DONE' ] += 1\n if job.cputime is not None:\n job_summary[ job.step_name ]['cputime'] += int(job.cputime)\n\n if job.max_memory is not None and job.max_memory > job_summary[ job.step_name ][ 'max_mem']:\n job_summary[ job.step_name ][ 'max_mem'] = int(job.max_memory)\n\n elif job.status == Job_status.RUNNING:\n job_summary[ job.step_name ][ 'RUNNING' ] += 1\n elif job.status == Job_status.QUEUEING or job.status == Job_status.SUBMITTED:\n job_summary[ job.step_name ][ 'QUEUING' ] += 1\n elif job.status == Job_status.FAILED or job.status == Job_status.NO_RESTART:\n job_summary[ job.step_name ][ 'FAILED' ] += 1\n else:\n job_summary[ job.step_name ][ 'UNKNOWN' ] += 1\n\n\n\n local_time = strftime(\"%d/%m/%Y %H:%M\", time.localtime())\n \n\n pickle_file = \"{}.{}\".format(self.pipeline.project_name, self.pipeline._pid)\n\n print(\"[{} @{} {}]\".format( local_time,self.pipeline._hostname , pickle_file))\n\n print(\"{:20} || {:12} || {:12} || {:2s} {:2s} {:2s} {:2s} {:2s}\".format(\"Run stats\", \"Runtime\", \"Max Mem\", \"D\",\"R\",\"Q\",\"F\",\"U\"))\n\n for step in sorted(self.pipeline._workflow._analysis_order, key=self.pipeline._workflow._analysis_order.__getitem__):\n if step not in job_summary:\n continue\n\n print(\"{:20} || {:12} || {:12} || {:02d}/{:02d}/{:02d}/{:02d}/{:02d}\".format(step, \n self.format_time(job_summary[ step ]['cputime']),\n self.format_memory(job_summary[ step ]['max_mem']),\n job_summary[ step ][ 'DONE' ],\n job_summary[ step ][ 'RUNNING' ],\n job_summary[ step ][ 'QUEUING' ],\n job_summary[ step ][ 'FAILED' ],\n job_summary[ step ][ 'UNKNOWN' ]))", "def melon_count_summary(day_number, path):\n print(\"Day\", day_number)\n the_file = 
open(path)\n for line in the_file:\n line = line.rstrip()\n words = line.split('|')\n print(\"Delivered {} {}s for total of ${}\".format(words[1], words[0], words[2]))\n the_file.close()", "def generateMigrateTaskStatistics(self, taskTree, fn):\n self._getTaskStatistics(taskTree, self.parent)\n for key in self.taskStat:\n elapsedTime = time.strftime(\"%H:%M:%S\", time.gmtime((self.taskStat[key]['EndedTS'] - self.taskStat[key]['StartedTS']).total_seconds()))\n convertedStartTS = self.taskStat[key]['StartedTS'].replace(tzinfo=self.from_zone).astimezone(self.to_zone)\n convertedEndTS = self.taskStat[key]['EndedTS'].replace(tzinfo=self.from_zone).astimezone(self.to_zone)\n self.taskStat[key]['StartedTS'] = datetime.strftime(convertedStartTS, \"%Y-%m-%dT%H:%M.%S\")\n self.taskStat[key]['EndedTS'] = datetime.strftime(convertedEndTS, \"%Y-%m-%dT%H:%M.%S\")\n self.taskStat[key]['Elapsed'] = elapsedTime\n with open(fn, 'a+b') as fp:\n data = [['Task Group Name', 'Started Time', 'Ended Time', 'Elapsed Time', 'SubTask#', 'Task State']]\n logger.info('Generating migration task report', also_console=True)\n for i in range(len(self.groupingResShort)):\n if self.groupingResShort[i] in self.taskStat.keys():\n data.append([self.taskStat[self.groupingResShort[i]]['Description'],\n self.taskStat[self.groupingResShort[i]]['StartedTS'],\n self.taskStat[self.groupingResShort[i]]['EndedTS'],\n self.taskStat[self.groupingResShort[i]]['Elapsed'],\n str(self.taskStat[self.groupingResShort[i]]['count']),\n self.taskStat[self.groupingResShort[i]]['State']])\n maxRow = [max(map(len, col)) for col in zip(*data)]\n header = \" \".join((' ' + val.ljust(maxLength) + ' ' for val, maxLength in zip(data[0], maxRow)))\n fp.write(\"-\" * len(header) + '\\r\\n')\n fp.write(header + '\\r\\n')\n logger.info(header, also_console=True)\n fp.write(\"-\" * len(header) + '\\r\\n')\n data.remove(data[0])\n for row in data:\n tablerow = \" \".join((' ' + val.ljust(maxLength) + ' ' for val, maxLength in zip(row, maxRow)))\n fp.write(tablerow + '\\r\\n')\n logger.info(tablerow, also_console=True)\n fp.write(\"-\" * len(header) + '\\r\\n\\r\\n')\n logger.info('Completed generating migration task report %s' % fn)\n return self.taskStat", "def execute_summary(self, step):\n with self.summary_writer.as_default():\n tf.summary.scalar('bias', self.core.fmlayer.b, step=step)\n tf.summary.scalar('regularization_penalty', self.regularization, step=step)\n tf.summary.scalar('loss', self.reduced_loss, step=step)\n tf.summary.scalar('target', self.target, step=step)", "def add_summary(self):\n self.merged = tf.compat.v1.summary.merge_all()\n self.file_writer = tf.compat.v1.summary.FileWriter(self.config.dir_output,\n self.sess.graph)", "def logging(self, function):\n avg_nms_time_per_step = sum(self.nms_times)/len(self.nms_times)\n avg_total_time_per_step = sum(self.total_times)/len(self.total_times)\n\n avg_min_latency = [x[0] for x in self.inference_times]\n avg_max_latency = [x[1] for x in self.inference_times]\n avg_latency = [x[2] for x in self.inference_times]\n\n function(\"Inference stats: image size {}x{}, batches per step {}, batch size {}, {} steps\".format(\n self.cfg.model.image_size, self.cfg.model.image_size, self.cfg.ipuopts.batches_per_step, self.cfg.model.micro_batch_size, len(self.total_times)\n ))\n function(\"--------------------------------------------------\")\n function(\"Inference\")\n function(\"Average Min Latency per Batch: {:.3f} ms\".format(1000 * sum(avg_min_latency)/len(self.inference_times)))\n function(\"Average Max 
Latency per Batch: {:.3f} ms\".format(1000 * sum(avg_max_latency)/len(self.inference_times)))\n function(\"Average Latency per Batch: {:.3f} ms\".format(1000 * sum(avg_latency)/len(self.inference_times)))\n function(\"Average Inference Throughput: {:.3f} img/s\".format(sum(self.inference_throughputs)/len(self.inference_throughputs)))\n function(\"--------------------------------------------------\")\n # TODO remove the NMS and end-to-end time report once NMS is on device\n function(\"End-to-end\")\n function(\"Average NMS Latency per Batch: {:.3f} ms\".format(1000 * avg_nms_time_per_step/self.cfg.ipuopts.batches_per_step))\n function(\"Average End-to-end Latency per Batch: {:.3f} ms\".format(1000 * avg_total_time_per_step/self.cfg.ipuopts.batches_per_step))\n function(\"End-to-end Throughput: {:.3f} img/s\".format(sum(self.total_throughputs)/len(self.total_throughputs)))\n function(\"==================================================\")\n\n if self.cfg.eval.metrics:\n self.compute_and_print_eval_metrics()", "def summarize(export_folder):\n click.echo('loading KEGG graphs')\n graphs = [\n from_pickle(os.path.join(export_folder, fname))\n for fname in tqdm(get_paths_in_folder(export_folder))\n ]\n\n if graphs:\n summarize_helper(graphs)\n else:\n click.echo(\"Please export KEGG to BEL first. Run 'python3 -m pathme kegg bel' \")", "def track_tick_processing_performance(self, duration):\n\n self.parse_count += 1\n self.total_parse_time += duration\n self.mean_parse_time = self.total_parse_time / self.parse_count", "def report(self):\n s = 'Parsed task #%s, command %r.\\n' % (self.task_data.get('task_id', 0),\n self._parse_task_and_get_cmd())\n if self.start_date:\n s += 'Task started at %s.\\n' % str(self.start_date.time())\n if self.finish_date:\n s += 'Finished %s at %s, duration %s.\\n' % (\n 'successfully' if self.finished_ok else 'containing errors',\n str(self.finish_date.time()),\n str(self.finish_date - self.start_date))\n if self.require_signal_failed:\n sig = self.require_signal_failed\n s += 'Failed signal is: %r, reason %r, started at %s, ' \\\n 'finished at %s, duration %s.\\n' % (\n sig[0], sig[1]['reason'],\n str(sig[1][STATUS_STARTED].time()),\n str(sig[1][STATUS_FINISHED].time()),\n str(sig[1][STATUS_FINISHED] - sig[1][STATUS_STARTED]))\n s += 'Items scrapped: %s, spider errors: %s.\\n' % (\n self.items_scraped, self.spider_errors)\n if self.required_signals_done:\n s += 'Succeeded required signals:\\n'\n for sig in self.required_signals_done.iteritems():\n s += '\\t%r, started at %s, finished at %s, duration %s;\\n' % (\n sig[0], str(sig[1][STATUS_STARTED].time()),\n str(sig[1][STATUS_FINISHED].time()),\n str(sig[1][STATUS_FINISHED] - sig[1][STATUS_STARTED]))\n else:\n s += 'None of the signals are finished.\\n'\n return s", "def save_graph_summary(self):\n writer = tf.summary.FileWriter(LOG_PATH)\n writer.add_graph(self.graph)", "def summary(self):\n\n self.model.summary(print_fn=lambda x: logging.info(x))", "def update_metadata(self):\n parser = GenericParser(\n fn_re='{}/(e\\d+s\\d+)_.*/Production.nc'.format(self.data_folder),\n group_names=['sim'],\n group_transforms=[lambda x: x],\n top_fn='',\n step_ps=self.timestep\n )\n meta = gather_metadata('{}/e*/*nc'.format(self.data_folder), parser)\n meta['top_fn'] = sorted(glob('{}/e*/structure.prmtop'.format(self.input_folder)))\n self.meta = meta", "def __do_analysis(self):\n #Step 1: connect to mongodb and pick a streamer\n dbclient = db_connect.DBClient()\n streamer_data = 
dbclient.analyze_number_of_stream_viewers(self.streamer)\n streamer_messeges_data = dbclient.analyzeStream(self.streamer)\n\n timearr = []\n messagesarr = []\n streamer_timearr = []\n num_chattersarr = []\n\n #create time and messages array for plotting purposes\n for entry in streamer_messeges_data:\n timearr.append(entry['start_time'])\n messagesarr.append(entry['messeges_count'] * entry['messeges_count'])\n #print(entry['start_time'])\n\n #create time and chatters array for plotting purposes\n for entry in streamer_data:\n streamer_timearr.append(entry['deltatime_from_start_of_clip'])\n num_chattersarr.append(entry['num_viewers'])\n\n # print('start time: ' + str(timearr[0]))\n # print('end time: ' + str(timearr[-1]))\n # print('duration: ' + str(timearr[-1] - timearr[0]))\n # print('average views/min = ' + str(sum(messagesarr) / len(messagesarr)))\n\n average_message_count = sum(messagesarr) / len(messagesarr)\n\n averagearr = []\n plotting_time_arr = []\n labelarr = []\n\n for i in range(len(timearr)):\n averagearr.append(average_message_count*1.8)\n #print(str(timearr[i]) + ' converts to ' + str(datetime.datetime(2020, 1, 1, 0, 0) + timearr[i]))\n plotting_time_arr.append(datetime.datetime(2020, 1, 1, 0, 0) + timearr[i])\n labelarr.append(str(i))\n\n plotting_streamer_timearr = []\n for i in range(len(streamer_timearr)):\n plotting_streamer_timearr.append(datetime.datetime(2020, 1, 1, 0, 0) + streamer_timearr[i])\n\n #plot messages and cuttoff\n messeges_over_time_fig = pyplot.figure(1)\n messeges_over_time_fig.set_figheight(15)\n messeges_over_time_fig.set_figwidth(30)\n messeges_over_time_fig.suptitle(self.streamer + \"'s video data\")\n messeges_over_time_sub = messeges_over_time_fig.add_subplot(211)\n\n pyplot.plot(plotting_time_arr,messagesarr,label='messages/min')\n dots = pyplot.plot(plotting_time_arr,messagesarr,'bo',label='messages/min')\n\n #label dots\n count = 0\n last_entry_was_above_line = False\n for i in range(len(plotting_time_arr)):\n #print(str(count) +': comparing ' + str(messagesarr[i]) + ' with ' + str(averagearr[i]))\n if(messagesarr[i] > averagearr[i]):\n if(last_entry_was_above_line):\n #Don't increment the count because this is part of the same clip\n count = count\n else:\n #new clip above the line, increment clip count\n count = count + 1\n messeges_over_time_sub.annotate(count,xy=(plotting_time_arr[i],messagesarr[i]))\n last_entry_was_above_line = True\n else:\n last_entry_was_above_line = False\n # messeges_over_time_sub.annotate('NA',xy=(plotting_time_arr[i],messagesarr[i]))\n\n #finish plotting\n pyplot.plot(plotting_time_arr, averagearr,'',label='average')\n pyplot.gcf().autofmt_xdate()\n pyplot.ylabel('Messeges*Messeges')\n pyplot.xlabel('Time')\n\n viewers_over_time_sub = messeges_over_time_fig.add_subplot(212)\n\n pyplot.plot(plotting_streamer_timearr,num_chattersarr,label='num chatters')\n pyplot.ylabel('Chatters')\n pyplot.xlabel('Time')\n\n pyplot.tight_layout()\n pyplot.savefig(output_file_location+self.streamer+'.png')\n print('saved chart to ' + output_file_location+self.streamer+'.png')\n # pyplot.show()\n return average_message_count, streamer_messeges_data", "def print_summary(self, **kwargs):\n compile_time = sum([ps.compile_time for ps\n in self.profile_stats.values()])\n\n fct_call = dict([(fn, ps.fct_callcount)\n for (fn, ps) in iteritems(self.profile_stats)])\n\n fct_call_time = dict([(fn, ps.fct_call_time)\n for (fn, ps) in iteritems(self.profile_stats)])\n\n apply_time = {}\n for fn, ps in iteritems(self.profile_stats):\n for (i, 
node) in enumerate(fn.maker.fgraph.toposort()):\n apply_time[(i, node)] = ps.apply_time[node]\n for (i, n), t in iteritems(apply_time):\n if t == 0:\n print(i, n)\n\n apply_cimpl = {}\n for ps in itervalues(self.profile_stats):\n apply_cimpl.update(ps.apply_cimpl)\n\n message = self.message\n\n variable_shape = {}\n for ps in itervalues(self.profile_stats):\n variable_shape.update(ps.variable_shape)\n\n other_time = dict(\n linker_time=sum(\n [ps.linker_time for ps in self.profile_stats.values()]),\n optimizer_time=sum(\n [ps.optimizer_time for ps in self.profile_stats.values()]))\n\n self.print_summary_(\"print_summary\",\n compile_time, fct_call_time, fct_call,\n apply_time, apply_cimpl, message, variable_shape,\n self.local_time, other_time,\n **kwargs)", "def add_summary(self):\r\n self.merged = tf.summary.merge_all()\r\n self.file_writer = tf.summary.FileWriter(self.config.dir_output,\r\n self.sess.graph)", "def print_summary(self, **kwargs):\r\n compile_time = sum([ps.compile_time for ps\r\n in self.profile_stats.values()])\r\n\r\n fct_call = dict([(fn, ps.fct_callcount)\r\n for (fn, ps) in self.profile_stats.items()])\r\n\r\n fct_call_time = dict([(fn, ps.fct_call_time)\r\n for (fn, ps) in self.profile_stats.items()])\r\n\r\n apply_time = {}\r\n for fn, ps in self.profile_stats.items():\r\n for (i, node) in enumerate(fn.maker.fgraph.toposort()):\r\n apply_time[(i, node)] = ps.apply_time[node]\r\n for (i, n), t in apply_time.items():\r\n if t == 0:\r\n print i, n\r\n\r\n apply_cimpl = {}\r\n for fn, ps in self.profile_stats.items():\r\n apply_cimpl.update(ps.apply_cimpl)\r\n\r\n message = self.message\r\n\r\n variable_shape = {}\r\n for fn, ps in self.profile_stats.items():\r\n variable_shape.update(ps.variable_shape)\r\n\r\n other_time = dict(\r\n linker_time=sum(\r\n [ps.linker_time for ps in self.profile_stats.values()]),\r\n optimizer_time=sum(\r\n [ps.optimizer_time for ps in self.profile_stats.values()]))\r\n\r\n self.print_summary_(\"print_summary\",\r\n compile_time, fct_call_time, fct_call,\r\n apply_time, apply_cimpl, message, variable_shape,\r\n self.local_time, other_time,\r\n **kwargs)", "def publish_summary(self, jobs):\n pass", "def summary(self) -> None:\n print(\"Model manager summary:\")\n print(\"Preprocessor:\")\n print(self.preprocessor)\n print(\"Model summary:\")\n self.model.summary()\n print(\"Postprocessor:\")\n print(self.postprocessor)", "def dump_status(allnodes):\n subgraphs = get_subgraphs(allnodes)\n tags = subgraphs[None]\n for name, node in tags.items():\n if not node['ancestors']:\n print(\"SOURCE: TAG/{0}\".format(node['name']))\n for name, node in tags.items():\n if not node['descendants']:\n print(\"SINK : TAG/{0}\".format(node['name']))\n for sgname in sorted(subgraphs):\n if sgname is None:\n continue\n nodes = subgraphs[sgname]\n dump_graph_status(allnodes, sgname, nodes, tags)" ]
[ "0.6030052", "0.56780493", "0.5605528", "0.549664", "0.5452645", "0.5409617", "0.5409617", "0.5368399", "0.53507394", "0.5329125", "0.53232145", "0.5293525", "0.52526665", "0.5219868", "0.5217269", "0.52124816", "0.5212287", "0.52016246", "0.5195992", "0.51884717", "0.5188069", "0.516794", "0.51677704", "0.51610667", "0.5157343", "0.51511717", "0.5132046", "0.5102787", "0.5097117", "0.50951344" ]
0.8361473
0
Creates a new saved home
def save_new_home(user_id, rm_property_id, longitude, latitude, address):
    home = SavedHomes(
        user_id = user_id,
        rm_property_id = rm_property_id,
        longitude=longitude,
        latitude=latitude,
        address=address,
    )

    db.session.add(home)
    db.session.commit()

    return home
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_home():\n meta_desc = (\n 'Expected values and probability per lap of step-up'\n ' banners in Final Fantasy Brave Exvius (FFBE)')\n template_vars = {\n 'title' : sitesettings.SITE_NAME,\n 'siteurl' : sitesettings.SITE_URL,\n 'sitename' : sitesettings.SITE_NAME,\n 'meta_desc' : meta_desc,\n 'last_four_banners' : nav.get_last_four_banners('all'),\n 'last_four_single' : nav.get_last_four_banners('single'),\n 'last_four_multi' : nav.get_last_four_banners('multi'),\n 'all_banner_info' : get_all_banner_info(),\n }\n\n home_path = os.path.join(sitesettings.LOCAL_FILE_PATH)\n\n if not os.path.exists(home_path):\n os.makedirs(home_path)\n\n template_file = 'home.html'\n html_file_loc = os.path.join(home_path, 'index.html')\n generatehtml.generate_html(\n html_file_loc, template_file, template_vars, os.path.join(os.getcwd(), 'templates'))", "def create_home(\n home=None, render=False, replace=False, runtime=None, no_runtime=None, **kw\n):\n homePath = get_home_config_path(home)\n if not homePath:\n return None\n exists = os.path.exists(homePath)\n if exists and not replace:\n return None\n\n homedir, filename = os.path.split(homePath)\n if render: # just render\n repo = Repo.find_containing_repo(homedir)\n # XXX if repo and update: git stash; git checkout rendered\n ensembleDir = os.path.join(homedir, DefaultNames.EnsembleDirectory)\n ensembleRepo = Repo.find_containing_repo(ensembleDir)\n configPath, ensembleDir = render_project(\n homedir, repo, ensembleRepo, None, \"home\"\n )\n # XXX if repo and update: git commit -m\"updated\"; git checkout master; git stash pop\n return configPath\n else:\n if exists:\n rename_for_backup(homedir)\n\n newHome, configPath, repo = create_project(\n homedir,\n template=\"home\",\n runtime=runtime or \"venv:\",\n no_runtime=no_runtime,\n msg=\"Create the unfurl home repository\",\n creating_home=True,\n )\n if repo:\n repo.repo.git.branch(\"rendered\") # now create a branch\n return configPath", "def create_home_note(body,saved_home_id):\n\n note = HomeNotes(body=body, created_at = date.today(), saved_home_id=saved_home_id)\n\n db.session.add(note)\n db.session.commit()\n\n return note", "def create():", "def create():", "def test_create_home(self):\n factory = APIRequestFactory()\n request = factory.post('/homes/', {\n 'area_unit': 'SqFt',\n 'bathrooms': 2,\n 'bedrooms': 4,\n 'home_size': 1372,\n 'home_type': 'SingleFamily',\n 'last_sold_date': '',\n 'last_sold_price': '',\n 'link': 'https://www.zillow.com/homedetails/7417-Quimby-Ave-West-Hills-CA-91307/19866015_zpid/',\n 'price': '$739K',\n 'property_size': 10611,\n 'rent_price': '',\n 'rentzestimate_amount': 2850,\n 'rentzestimate_last_updated': '08/07/2018',\n 'tax_value': 215083,\n 'tax_year': 2017,\n 'year_built': 1956,\n 'zestimate_amount': 709630,\n 'zestimate_last_updated': '08/07/2018',\n 'zillow_id': 19866015,\n 'address': '7417 Quimby Ave',\n 'city': 'West Hills',\n 'state': 'CA',\n 'zipcode': 91307})\n\n self.assertEqual(Home.objects.count(), 0) # Bogus test", "def create():\n pass", "def new_database(app):\n app.status.message(\"Opening a folder..\")\n path = app.dialog.directory(\"Select a folder for the new database..\")\n if path == '':\n app.status.message('') \n return\n app.status.cursorToHourglass()\n app.close()\n folder = db.database(path=path, \n status = app.status, \n dialog = app.dialog)\n app.display(folder)\n app.status.hide()\n app.status.cursorToNormal()", "def home_newuser():\n\tui.newy1()\n\tnewfname = raw_input(\"NEW USER FIRST NAME: \")\n\tif newfname == \":EXIT\": 
home()\n\telse: pass\n\tnewlname = raw_input(\"NEW USER LAST NAME: \")\n\ttoday = raw_input(\"MARK PRESENT FOR TODAY [Y/N] \")\n\tif today in (\"Y\",\"y\"):\n\t\tz = db.new_user(newfname,newlname,True)\n\t\tif z is True:\n\t\t\tprint \"USER ADDED... PRESS [ENTER] TO GOTO HOMESCREEN\"\n\t\telif z is False:\n\t\t\tprint \"USER WAS NOT ADDED... PRESS [ENTER] TO GOTO HOMESCREEN\"\n\t\traw_input()\n\telif today == \":EXIT\":\n\t\thome()\n\telse:\n\t\tz = db.new_user(newfname,newlname,False)\n\t\tif z is True:\n\t\t\tprint \"USER ADDED... PRESS [ENTER] TO GOTO HOMESCREEN\"\n\t\telif z is False:\n\t\t\tprint \"USER WAS NOT ADDED... PRESS [ENTER] TO GOTO HOMESCREEN\"\n\t\traw_input()\n\thome()", "def home():\n dbcreate.generatedb()\n return render_template('home.html')", "def go_home(self):\n command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.GO_HOME)\n message = self._build_command_user_data_message(command)\n\n self._send_packet(message, False)", "def goto_create(self):\n\n self.create.click()", "def home():\n G.DEVICE.home()", "def createHost(self):\n self.createUser()\n self.user.host_for = [self.program.scope.key()]\n self.user.put()", "def new_project(self, rootdir=None):\n if rootdir is None:\n rootdir = Ui.instance().select_directory(user.home)\n if not os.path.exists(rootdir):\n os.makedirs(rootdir)\n\n print 'Weld.new_project in ', rootdir\n project = Project(rootdir)\n\n project.save()\n self.project = project\n self.current_project_path = rootdir\n Ui.instance().set_resources_draggable(True)\n Ui.instance().show_status('new project created')", "def create(db):\n if exists(db):\n print(\"phonebook %r already exists\" % db)\n sys.exit(-1)\n else:\n database = {}\n pickle.dump(database, open(db, 'wb'))\n print(\"created phonebook %r in the current directory\" % db)", "def newlawn(request, location):\n l = Lawn()\n l.location = location\n l.save()\n return TemplateResponse(request, 'lawn.html', {'lawn': l})", "def newrepo():\n form = AddRepoForm()\n if form.validate_on_submit():\n\n # make the directory for this package\n os.mkdir(DATA + form.name.data)\n\n flash('Repo created successfully')\n\n # redirect to the login page\n return redirect(url_for('home.dashboard'))\n\n # load registration template\n return render_template('home/add.html', form=form, title='Local Repo', target=\"add\")", "def register_home(route):\n global _home\n _home = route", "def register_home(route):\n global _home\n _home = route", "def home_folder(self, home_folder):\n\n self._home_folder = home_folder", "def create_new(self, root, name_length):\n self.name = create_random_string(name_length)\n self.ctime = datetime.datetime.now()\n date_time = datetime.datetime.strftime(self.ctime, \"%Y%m%d_%H%M%S\")\n self.folder = f\"{date_time}_{self.name}\"\n self.path = os.path.join(root, self.folder)\n try:\n os.makedirs(self.path)\n print(f\"Created folder {self.folder}\")\n except OSError:\n print(f\"Directory {self.folder} already exists\")\n except:\n print(f\"Cannot create folder: {self.folder}\")\n raise", "def create():\n\tcreate_db()", "def create(self):", "def root_create(request): # pylint: disable=W0613\r\n root = get_or_create_root()\r\n return redirect('wiki:get', path=root.path)", "def create(self):\n pass", "def create(self):\n pass", "def create(self):\n pass", "def create_screen(self, name):\n\n State.screen = Screen(name)\n State.save(name)", "def home(self, *args, **kwargs):\n pass" ]
[ "0.6463083", "0.6199926", "0.6192472", "0.60189664", "0.60189664", "0.5912727", "0.5791303", "0.5774873", "0.57226336", "0.5657594", "0.5638584", "0.56131387", "0.5612833", "0.56073946", "0.5592318", "0.5582505", "0.5559216", "0.55544263", "0.55526763", "0.55526763", "0.5500334", "0.54917246", "0.5483381", "0.54750425", "0.5472624", "0.5468469", "0.5468469", "0.5468469", "0.54683703", "0.54622835" ]
0.7197667
0
Creates a new home note on saved home
def create_home_note(body,saved_home_id):

    note = HomeNotes(body=body, created_at = date.today(), saved_home_id=saved_home_id)

    db.session.add(note)
    db.session.commit()

    return note
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_note(self, noteTitle, note, date):\n user = User.objects.create(username='userdemo')\n user.set_password('calnote24')\n user.save()\n Note.objects.create(noteTitle=noteTitle, note=note, date=date, user_id=user.id)", "def createNote(self, authenticationToken, note):\r\n pass", "def save(self, client, notebook, title):\n note = Types.Note()\n note.title = title\n note.notebookGuid = notebook.guid\n note.resources = self.resources\n note.content = self._get_body()\n\n try:\n note_store = client.get_note_store()\n return note_store.createNote(note)\n\n except Errors.EDAMUserException as e:\n print(\"EDAMUserException: {}\".format(e), file=sys.stderr)\n\n except Errors.EDAMNotFoundException as e:\n print(\"EDAMNotFoundException: {}\".format(e), file=sys.stderr)", "def create_a_note(self, data):\n return self.client._post(\"/notes\", json=data)", "def save_new_home(user_id, rm_property_id, longitude, latitude, address):\n home = SavedHomes(\n user_id = user_id,\n rm_property_id = rm_property_id,\n longitude=longitude,\n latitude=latitude,\n address=address,\n )\n \n db.session.add(home)\n db.session.commit()\n\n return home", "def create_note(self, owner, title, text, note_type, important):\r\n note = self.create(owner=owner, title=title, text=text, note_type=note_type, important=important)\r\n return note", "def createNote(title, author, body) -> dict:\n new_note = Note(title=title, author=author, body=body, created_at=now())\n new_note.save()\n return {\n 'uuid': new_note.uuid, 'title': new_note.title,\n 'author': new_note.author, 'body': new_note.body, 'created_at': localtime(new_note.created_at)\n }", "def write_note():\n\n title_note = request.form.get(\"title\")\n note = request.form.get(\"note\")\n\n date_string = datetime.today().strftime('%Y-%m-%d')\n diary = Note(user_id=session[\"user_id\"],title_note = title_note, note_created=date_string, note=note)\n\n db.session.add(diary)\n db.session.commit()\n \n return \"note added\"", "def create_note(job_applied_id, user_id, note_title, note_text, note_category, note_date_created):\n\n note = Note(job_applied_id =job_applied_id, user_id = user_id , note_title = note_title , note_text = note_text,note_category = note_category, note_date_created = note_date_created)\n db.session.add(note)\n db.session.commit()\n\n return note", "def noteCreate(ownerId, title):\n query = QUERY_CREATE_NOTE_ENTRY\n query = query.format(**{'owner_id':ownerId, 'title':title})\n data = None\n try:\n cursor.execute(query)\n connection.commit()\n data = cursor.fetchone()\n except Exception as e:\n return False, ERROR_CREATION_NOTE, 'Error creating note', -1\n noteId = -1\n if data != None:\n noteId, = data\n return True, NO_ERROR, 'Note created', noteId", "def test_create_note(self):\n pass", "def createNote(self, authenticationToken, note):\r\n self.send_createNote(authenticationToken, note)\r\n return self.recv_createNote()", "def save(self):\n db = DBStorage()\n p = self.createPatient()\n db.add_prescription(p)", "def create_note_meta_data(sender, instance, created, **kwargs):\n if created:\n NoteMetaData.objects.create(note=instance)", "def save_note():\n body = request.json\n user_id = body.get('user_id')\n token = body.get('token')\n user = User.get(User.id == user_id).username\n if not validate_token(user, token):\n return HTTPResponse(status=400, body={\"message\":\"Validation error.\"})\n note_title = body.get('title')\n note_content = body.get('content')\n validation = validate_note(user, note_title, note_content)\n if validation != \"OK\":\n 
return HTTPResponse(status=500, body={\"message\":validation})\n new_note = Notes(user=user_id, title=note_title, content=note_content)\n new_note.save()\n new_token = generate_token(user)\n ret = {\"token\":new_token.decode('utf-8'), \"user_id\":user_id}\n return HTTPResponse(status=500, body=ret)", "def add_note():\n pass", "def new(self, note):\n path = self.config.get_config('cfg_nt_path')\n filename = self.get_filename(note['content'])\n access_time = time.time()\n filetime = datetime.datetime.now().strftime(\"%y%m%d-%H%M%S\")\n\n if filename:\n if os.path.isfile(path + \"/\" + filename):\n filename = filetime + \"_\" + filename # Don't blast over files with same name, i.e. same first line.\n\n try:\n f = open(path + \"/\" + filename, 'w')\n f.write(note['content'])\n f.close()\n self.log.info(\"Writing %s\", filename)\n\n os.utime(path + \"/\" + filename, (access_time, float(note['modifydate'])))\n\n return filename\n except:\n self.log.error(\"Error writing note: %s\", note['key'])\n self.log.debug(\"Exception: %s\", sys.exc_info()[1])\n else:\n self.log.error(\"Error generating filename for note: %s\", note['key'])\n\n return False", "def create(user_last_name, user_first_name, movie_title, note):\n notation = Notation(user_last_name=user_last_name, user_first_name=user_first_name, movie_title=movie_title, note=note)\n\n return notation.save()", "def post(self, copy_id):\n checkCopyValidity(copy_id)\n note_body = request.get_json()\n new_note = models.Notes()\n new_note.parse_body(note_body)\n new_note.copy_id = copy_id\n db.session.add(new_note)\n db.session.commit()\n return 'A note \\\"{}\\\" has been added to book copy of {}'.format(new_note.note, copy_id), 201", "def add_note (self,\r\n index,\r\n keyset=None,\r\n text=None,\r\n metadata=None,\r\n note=None,\r\n keyset_only=None,\r\n meta_only=None,\r\n text_only=None):\r\n\r\n # USING SHELF\r\n\r\n if note:\r\n if self.using_shelf:\r\n self.note_dict[str(index)] = note\r\n text = note.text\r\n keyset = note.keyset\r\n metadata = note.meta\r\n\r\n\r\n elif keyset_only:\r\n if self.using_shelf:\r\n self.note_dict[str(index)].keyset = keyset_only\r\n keyset = keyset_only\r\n elif text_only:\r\n if self.using_shelf:\r\n self.note_dict[str(index)].text = text_only\r\n text = text_only\r\n elif meta_only:\r\n if self.using_shelf:\r\n self.note_dict[str(index)].meta = meta_only\r\n\r\n metadata = meta_only\r\n\r\n else:\r\n if self.using_shelf:\r\n self.note_dict[str(index)] = Note(keyset,\r\n text,\r\n metadata)\r\n if not text:\r\n text = ''\r\n if not keyset:\r\n keyset = set()\r\n if not metadata:\r\n metadata = {'size':self.defaults.get('size'),\r\n 'date':[str(datetime.datetime.now())],\r\n 'user':self.defaults.get('user')}\r\n\r\n # USING DATABASE\r\n if self.using_database:\r\n aprint('ADDING NOTE')\r\n\r\n text = text.replace(\"'\",\"''\")\r\n db_cursor.execute(\"SELECT * FROM notes\")\r\n\r\n\r\n value_tuple = (notebookname,str(index),text,metadata['size'],metadata['user'])\r\n db_cursor.execute(\"INSERT OR REPLACE\"\r\n +\" INTO notes\"\r\n +\" (notebook, note_index, note_body, size, user)\"\r\n +\" VALUES (?,?,?,?,?);\",\r\n value_tuple)\r\n if not isinstance(metadata['date'],list):\r\n metadata['date'] = [metadata['date']]\r\n metadata['date'] = [str(d) for d in metadata['date']]\r\n\r\n for d_temp in metadata['date']:\r\n\r\n value_tuple = (notebookname, str(index), d_temp,)\r\n db_cursor.execute(\"INSERT OR REPLACE\"\r\n +\" INTO timestamps\"\r\n +\" (notebook, note_index, timestamp)\"\r\n +\" VALUES (?,?,?);\",\r\n 
value_tuple)\r\n\r\n\r\n for k_temp in keyset:\r\n value_tuple = (notebookname, str(index), k_temp,)\r\n db_cursor.execute(\"INSERT OR REPLACE \"\r\n +\"INTO all_note_keys \"\r\n +\"(notebook, note_index, keyword)\"\r\n +\" VALUES (?,?,?);\",\r\n value_tuple)", "def save(self)->None:\n database.cursor.execute(\"INSERT INTO meetups(topic,happening_date,tags,location,images,body) VALUES(%s,%s,%s,%s,%s,%s) RETURNING id\", (\n self.topic,\n self.happening_on,\n self.tags,\n self.location,\n self.images,\n self.body\n ))\n super().save()", "def page_add_new_note(self):\n\n history_back_id = self.helper_retrieve_last_request_get_dict_key_val_index_zero_or_return_none(\"history_back_id\")\n\n task = Task() # just a temporary one, won't even be saved; it's just so that the form below can stay unchanged\n\n page_body = html_page_templates.page_edit_note_template(self.task_store, task, self.sess_action_auth,\n editing_mode_existing_note=False, history_back_id=history_back_id)\n\n return page_body", "def note_create(request):\n\n user = request.user\n note = Note.objects.create(author=user)\n serializer = NoteSerializer(note)\n\n return Response(serializer.data)\n # except Exception:\n # return Response(\"Something terrible went wrong. Can't create this note.\")", "def new_entry(title, content):\n\n title.strip # Remove the spaces from both sides.\n filename = f\"entries/{title}.md\"\n if default_storage.exists(filename):\n return False\n default_storage.save(filename, ContentFile(content))\n return True", "def create():", "def create():", "def new_note(self, memo, tags=()):\n self.notes.append(Note(memo, tags))", "def take_note(self, text):\r\n\r\n self.date = str(datetime.datetime.now().date()) + \"%\" + str(datetime.datetime.now().hour) + \"+\" + str(\r\n datetime.datetime.now().minute) + \"}\"\r\n self.file_name = \"notes/\" + str(self.date).replace(\":\", \"-\") + \"-note.txt\"\r\n with open(self.file_name, \"w\") as f:\r\n f.write(text)\r\n # subprocess.Popen([\"notepad.exe\", self.file_name])\r", "def write_note(note):\n \n logfile = open(NOTES_ORG_FILE, 'a')\n timestamp = time_stamp()\n str = \"** [%s] %s\\n:PROPERTIES:\\n:guid: %s\\n:END:\\n%s\\n\" % (timestamp, note.title, note.guid, note.description)\n logfile.write(str)\n\n logfile.close()", "def req_save_new_note(self):\n\n\n if self.helper_action_get_request_is_wrong(\"req_save_new_note\"):\n return\n\n if self.helper_sessactionauth_is_wrong():\n return\n\n task = Task()\n\n self.helper_save_task_itself_from_req(task)\n\n self.ui_backend.save_new_note(task)\n self.last_request_get_dict[\"taskid\"] = [\n task.taskid] # inject back so that the next rendered page can access it as if the note always existed" ]
[ "0.6835227", "0.6575961", "0.6395481", "0.63940823", "0.6325117", "0.6285266", "0.6280023", "0.6272395", "0.62301695", "0.6170147", "0.61274177", "0.61244637", "0.5765972", "0.5762432", "0.5730444", "0.57291615", "0.57194144", "0.5715769", "0.5715107", "0.5631282", "0.56228554", "0.56067634", "0.55984366", "0.55685467", "0.5553358", "0.5553358", "0.5548884", "0.5544736", "0.5535166", "0.55001616" ]
0.829951
0
removes note on saved home
def remove_home_note(home_note_id):

    note = HomeNotes.query.filter_by(home_note_id=home_note_id).first()

    db.session.delete(note)
    db.session.commit()

    return note
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rm(ctx, note):\n directory = ctx.obj[\"config\"][\"owner\"][\"dir\"]\n note = Note(directory, note)\n os.remove(note.path)\n\n click.secho(\"Success! Removed {}\".format(note.filename), fg=\"green\")", "def remove_note(self, note: int) -> None:\n if note in self.notes:\n self.notes.remove(note)", "def delete_note (self,\r\n index):\r\n\r\n if self.using_shelf:\r\n\r\n try:\r\n del self.note_dict[str(index)]\r\n except:\r\n display.noteprint((alerts.ATTENTION,alerts.DELETE+str(index)+alerts.FAILED))\r\n\r\n if self.using_database:\r\n aprint('DELETING NOTE')\r\n value_tuple = (notebookname,str(index),)\r\n db_cursor.execute(\"DELETE FROM \"\r\n +\"notes WHERE notebook=?\"\r\n +\" AND note_index=?;\",\r\n value_tuple)\r\n db_cursor.execute(\"DELETE FROM\"\r\n +\" all_note_keys\"\r\n +\" WHERE notebook=?\"\r\n +\" and note_index=?;\",\r\n value_tuple)\r\n db_cursor.execute(\"DELETE FROM\"\r\n +\" timestamps\"\r\n +\" WHERE notebook=?\"\r\n +\" and note_index=?;\",\r\n value_tuple)\r\n db_cursor.execute(\"SELECT note_index\"\r\n +\" FROM notes\"\r\n +\" WHERE notebook=?;\",\r\n (notebookname,))", "def test_remove_note(self):\n self.login()\n\n # Create note\n fakeName = fake.text()[0:100]\n fakeContent = fake.text()[:256]\n date = datetime.now()\n self.create_note(fakeName, fakeContent, date)\n time.sleep(2)\n\n url = \"%s%s\" % (self.live_server_url, \"/notes\")\n self.driver.get(url)\n time.sleep(5)\n\n linkButton = self.driver.find_element_by_id(\"delete-test\")\n linkButton.click()\n time.sleep(5)\n\n # Verify that the note was deleted\n self.assertEqual(self.live_server_url + \"/notes\", self.driver.current_url)\n\n message = self.driver.find_element_by_id(\n \"test-removed\").get_attribute(\"innerText\").strip()\n self.assertEqual(message, \"No Notes.\")", "def delete_note(entry_id):\n\n entry = Entry.query.get(entry_id)\n\n entry.description = None\n\n db.session.commit()\n\n return redirect(f\"/update-entry/{entry.entry_id}\")", "def delete(self, **kwargs):\n self.dbdel('note', kwargs)", "def aboutToDelete(self):\n \n pass", "def aboutToDelete(self):\n \n pass", "def aboutToDelete(self):\n \n pass", "async def _remove_note(self, ctx: Context, number: int):\n\n async with self.config.member(ctx.author).notes() as notes:\n try:\n notes.pop(number-1)\n except IndexError:\n return await ctx.send(\n _(\"Note number {} not found.\").format(number)\n )\n\n await ctx.message.add_reaction(CHECK_MARK)", "async def _remove_all_notes(self, ctx: Context):\n\n async with self.config.member(ctx.author).notes() as notes:\n notes.clear()\n\n await ctx.message.add_reaction(CHECK_MARK)", "def delete_entry(title):\n filename = f\"entries/{title}.md\"\n if default_storage.exists(filename):\n default_storage.delete(filename)", "def test_delete_note(self):\n pass", "def clear_user_visible_notes():\n grammar.user_visible_notes = []", "def deleteNote(self, authenticationToken, guid):\r\n pass", "def unsetNotes(self):\n return _libsbml.SBase_unsetNotes(self)", "def __deleteSave(self) -> None:\n os.remove(self.save_location)", "def delete_habit():\n analytics.remove_habit('Play Piano')", "def delete_entry(key):\n db = sh.open(the_phone_book_name, flag='c', writeback=True)\n if key in db:\n confirm = input(\"Delete {name} [y/n]: \".format(name=key))\n if confirm.lower() == 'y':\n print(\"Deleting entry ..... 
{name}\\n\".format(name=key))\n del db[key]", "def on_MainWindow_delete_event(self, *args):\n\n if self.prefs.get('savetemp') == 'False':\n if path.exists(MBRAT_TMPF):\n os.remove(MBRAT_TMPF)\n Gtk.main_quit(args)", "def remove_reminder(self, reminder_info):\n store = self.load_data(default=[])\n if reminder_info in store:\n index = store.index(reminder_info)\n del store[index]\n self.save_data(store)", "def delete(self, *args):\n if self.cur == Win.left:\n self.commands.delpl([])\n else:\n cur_song = self.rightwin.highlighted()\n\n self.rightwin.delete(cur_song)\n\n if not self.rightwin.data:\n self.switch_view_left()", "def remove_note(self, note, octave=-1):\n res = []\n for x in self.notes:\n if isinstance(note, six.string_types):\n if x.name != note:\n res.append(x)\n else:\n if x.octave != octave and octave != -1:\n res.append(x)\n else:\n if x != note:\n res.append(x)\n self.notes = res\n return res", "def delete_note(id):\n\n note = Note.query.get_or_404(id)\n db.session.delete(note)\n db.session.commit()\n flash('You have successfully deleted the role.')\n\n # redirect to the roles page\n return redirect(url_for('user.list_notes'))\n\n return render_template(title=\"Delete Note\")", "def delete_meal():", "def delete_menu():", "def unsend(self, irc, msg, args, user, id):\n try:\n note = self.db.get(id)\n except dbi.NoRecordError:\n irc.errorInvalid('note id')\n if note.frm == user.id:\n if not note.read:\n self.db.unsend(id)\n irc.replySuccess()\n else:\n irc.error('That note has been read already.')\n else:\n irc.error('That note wasn\\'t sent by you.')", "def delete():", "def remove_notes(self, notes):\n if isinstance(notes, six.string_types):\n return self.remove_note(notes)\n elif hasattr(notes, \"name\"):\n return self.remove_note(notes)\n else:\n for x in notes:\n self.remove_note(x)\n return self.notes", "def unset(bot, update, chat_data):\n if 'job' not in chat_data:\n update.message.reply_text('Sem notificacoes ativadas')\n return\n\n job = chat_data['job']\n job.schedule_removal()\n del chat_data['job']\n check = emojize(\":white_check_mark:\", use_aliases=True)\n update.message.reply_text('Notificacao cancelada com sucesso'+check+'')" ]
[ "0.7145103", "0.6710412", "0.65346456", "0.635698", "0.6318039", "0.62572455", "0.62466663", "0.62466663", "0.62466663", "0.6242504", "0.62359774", "0.62215626", "0.61557317", "0.6136657", "0.609395", "0.6005389", "0.5947361", "0.5940694", "0.59406656", "0.5910344", "0.5900662", "0.5890077", "0.5869502", "0.58666116", "0.5864381", "0.5820868", "0.58101916", "0.57966214", "0.5773166", "0.5763406" ]
0.7018293
1
queries SavedHomes using saved_home_id to get longitude
def saved_home_longitude(saved_home_id):

    sql = "SELECT longitude FROM saved_homes WHERE saved_home_id = :saved_home_id"

    cursor = db.session.execute(sql,{"saved_home_id": saved_home_id})

    longitude = cursor.fetchone()

    return longitude
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def saved_home_latitude(saved_home_id):\n\n sql = \"SELECT latitude FROM saved_homes WHERE saved_home_id = :saved_home_id\"\n\n cursor = db.session.execute(sql,{\"saved_home_id\": saved_home_id})\n\n latitude = cursor.fetchone()\n\n return latitude", "def get_address(saved_home_id):\n\n sql = \"SELECT address FROM saved_homes WHERE saved_home_id= :saved_home_id\"\n\n cursor = db.session.execute(sql,{\"saved_home_id\": saved_home_id})\n\n old_address = cursor.fetchone()\n \n address = \" \".join(old_address)\n\n return address", "def saved_businesses(saved_home_id):\n\n sql = \"SELECT bus_name FROM saved_businesses WHERE saved_home_id = :saved_home_id\"\n\n cursor = db.session.execute(sql,{\"saved_home_id\": saved_home_id})\n\n bus = cursor.fetchone()\n\n return bus", "def read_long_lat_proxi():\n session = Session()\n # data est une liste de tuple\n long_lat_proxi_data = session.query(Prix_Median.longitude,\n Prix_Median.latitude,\n Prix_Median.ocean_proximity_str,\n Prix_Median.ocean_proximity).all()\n session.close()\n list_long_lat = DataFrame(long_lat_proxi_data)\n list_long_lat = list_long_lat.drop_duplicates()\n return list_long_lat", "def get_location(geoname):\n\n DB_NAME = global_settings.DB_NAME_GEONAMES\n db_user = global_settings.POSTGRESQL_USERNAME\n db_password = global_settings.POSTGRESQL_PASSWORD\n db_host = global_settings.POSTGRESQL_HOST\n db_port = global_settings.POSTGRESQL_PORT\n\n sql = \"SELECT latitude, longitude FROM {} WHERE name like '{}'\".format(global_settings.TABLE_NAME_GEONAMES, geoname)\n\n resp = sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql, True)\n\n if not resp['success']:\n return []\n\n lat_long = []\n\n for data in resp['data']:\n lat_long.append([data[0], data[1]])\n\n return lat_long", "def set_home_position(self, lat, lon, alt):\n pass", "def save_new_home(user_id, rm_property_id, longitude, latitude, address):\n home = SavedHomes(\n user_id = user_id,\n rm_property_id = rm_property_id,\n longitude=longitude,\n latitude=latitude,\n address=address,\n )\n \n db.session.add(home)\n db.session.commit()\n\n return home", "def get_all_locations(self):", "def get_all_locations():\n rs = run_query('''select * from zlrz_office_location''')\n return [] if rs is None else list(map(lambda t: Location(t[1], t[2], t[3], t[4], t[5], t[0]), rs))", "def get_longitude(query):\n data = get_data(query)\n longitude = 2.3504873\n try:\n longitude = (\n data.get_data(query)[\"results\"][0][\"geometry\"][\"location\"][\"lng\"])\n except IndexError:\n longitude = 2.3504873\n finally:\n return longitude", "def geo_locate(cursor: sqlite3.Cursor):\n cursor.execute('''DELETE FROM location_cache''') # Scrub previous results to start over\n\n geo_code = Nominatim(user_agent=\"capstone_project\")\n cursor.execute(\"\"\"SELECT location FROM combined_jobs\"\"\")\n jobs = cursor.fetchall() # Set to .fetchall once development is complete\n\n for location in jobs:\n try:\n full_loc = geo_code.geocode(location[0])\n print(location[0])\n cursor.execute(f\"\"\"INSERT INTO location_cache(location, latitude, longitude)\n VALUES (?,?,?)\"\"\", (location[0], full_loc.latitude, full_loc.longitude))\n except AttributeError:\n print(AttributeError)\n except sqlite3.IntegrityError:\n print(sqlite3.IntegrityError)", "def get_home(self):\n return self.complete_address[6]", "def getFeaturedLocation(guide):\n photos = guide.photos.all()\n\n x = 0\n y = 0\n z = 0\n\n size = 0\n\n for photo in photos:\n if photo.latitude:\n lat = radians(float(photo.latitude))\n lon = 
radians(float(photo.longitude))\n x += cos(lat) * cos(lon)\n y += cos(lat) * sin(lon)\n z += sin(lat)\n size+=1\n\n if size is 0:\n return None\n\n x = float(x / size)\n y = float(y / size)\n z = float(z / size)\n\n return {\n 'latitude': degrees(atan2(z, sqrt(x * x + y * y))),\n 'longitude': degrees(atan2(y, x))\n }\n # return atan2(z, sqrt(x * x + y * y)), atan2(y, x)\n\n\n\n # for photo in photos:\n # if photo.latitude:\n # return {\n # 'latitude': photo.latitude,\n # 'longitude': photo.longitude\n # }\n\n # return None", "def get_hikedetails_by_userloc(k):\n \n npbyuserloc = Hike.query.filter(Hike.area_name == k).all()\n\n return npbyuserloc", "def Fetch_station(long, lat, y):\r\n global ddf\r\n dmin = 1000000\r\n rs = 0\r\n i=0\r\n for i in range(len(ddf[y])):\r\n #Calculate the distance between zip code location and weather station location\r\n dnew = Distance_orthonormique(ddf[y]['LON'][i], ddf[y]['LAT'][i], long, lat)\r\n\r\n if(dmin > dnew):\r\n #If the last smaller distance is superior than the current distance :\r\n #the new smaller distance is the current distance\r\n dmin = dnew\r\n rs = i\r\n\r\n #rs = index dataframe weather station\r\n #ddf[y]['STATION NAME'][rs] = Weather station name\r\n #round(dmin, 2) = Distance between weather station and zip code\r\n \r\n return rs, ddf[y]['STATION NAME'][rs], round(dmin,2)", "def lat_lng_approximation(homes, destination, lat_lng_dest, average_speed):\n\n for home in homes:\n # Stores the lat and lng points for the home\n lat_lng_home = (home.home.latitude, home.home.longitude)\n\n # Returns the distance from the two lat lng points in miles\n distance = geopy.distance.geodesic(lat_lng_home, lat_lng_dest).miles\n\n # If the distance is less than a mile then don't add any distance since it is already so close\n if distance > 1:\n # Extra distance is determined by giving more distance to homes farther away\n extra_distance = EXTRA_DISTANCE_LAT_LNG_APPROX * (1 - 1.0/distance)\n # This normalizes the value since walking needs less of a weight than biking since homes\n # are more direct when walking.\n distance += extra_distance * average_speed/AVERAGE_BICYCLING_SPEED\n if average_speed is not 0:\n # If the speed is not zero (to prevent divide by zero, then add the commute time to\n # the home\n commute_time_hours = distance / average_speed\n commute_time = commute_time_hours * 60\n home.approx_commute_times[destination] = commute_time\n else:\n # If there was a divide by zero then just eliminate the home\n home.eliminate_home()", "def location_search(self, lat: float, lng: float) -> List[Location]:\n params = {\n \"latitude\": lat,\n \"longitude\": lng,\n # rankToken=c544eea5-726b-4091-a916-a71a35a76474 - self.uuid?\n # fb_access_token=EAABwzLixnjYBABK2YBFkT...pKrjju4cijEGYtcbIyCSJ0j4ZD\n }\n result = self.private_request(\"location_search/\", params=params)\n locations = []\n for venue in result[\"venues\"]:\n if \"lat\" not in venue:\n venue[\"lat\"] = lat\n venue[\"lng\"] = lng\n locations.append(extract_location(venue))\n return locations", "def search_geoloc_range(request):\n\n distance = float(request.POST['distance'])\n\n latlng = (request.POST['latlng']).replace(\"(\",'').replace(\")\",'').split(', ')\n latitude = float(latlng[0])\n longitude = float(latlng[1])\n print distance\n print latitude\n print longitude\n\n # count range of nowa latlng\n radius_lat = (distance/(69.172)) #count latitude range\n min_lat = latitude - radius_lat\n max_lat = latitude + radius_lat\n print min_lat\n print max_lat\n\n radius_lng = 
(math.fabs(distance/(math.cos(longitude) * 69.172))) #count longitude range\n min_lng = longitude - radius_lng\n max_lng = longitude + radius_lng\n print min_lng\n print max_lng\n\n # if sys.version_info < (2, 7):\n # min_lat = decimal.Decimal(str(min_lat))\n # max_lat = decimal.Decimal(str(max_lat))\n # min_lng = decimal.Decimal(str(min_lng))\n # max_lng = decimal.Decimal(str(max_lng))\n\n # query db to match the range of dentist work place in db\n total = WorkPlace.objects.filter(latitude__gte=min_lat, latitude__lte=max_lat,\n longitude__gte=min_lng, longitude__lte=max_lng).count()\n\n result = []\n\n # step for how many lines separate per page. then count nowa page's start line no. and end line no.\n if 'page' in request.POST:\n page = request.POST['page']\n else:\n page = 1\n\n step = 10\n end = step * int(page)\n start = step * (int(page)-1)\n is_end = False\n\n if (end - total) < step:\n is_end = False\n WorkPlaceDict = WorkPlace.objects.filter(latitude__gte=min_lat, latitude__lte=max_lat,\n longitude__gte=min_lng, longitude__lte=max_lng).order_by('id')[start:end]\n\n for i in WorkPlaceDict:\n\n dentist_profile = i.dentistid\n did = dentist_profile.user.user.id\n\n latitude = str(i.latitude)\n longitude = str(i.longitude)\n latlng = \"(\"+latitude+\", \"+longitude+\")\"\n\n counts = _relation_counts(request,did,request.user.id)\n\n i_wrap = {\n \"clinic\": i.clinic_name,\n \"work_location\": i.location,\n \"latlng\": latlng,\n \"business_hour\": str(i.business_hour),\n \"dentistid\": did,\n \"dentistname\": _show_obj_name(did),\n \"summary\": dentist_profile.user.summary,\n \"avatar\": settings.MEDIA_URL + str(dentist_profile.user.imagesmall),\n \"patient_count\": counts[\"patient_count\"],\n \"follower_count\": counts[\"follower_count\"],\n \"status\": counts[\"status\"],\n \"is_end\": is_end\n }\n\n result.append(i_wrap)\n\n else:\n is_end = True\n i_wrap = {\n \"is_end\": is_end\n }\n\n result.append(i_wrap)\n\n template_var = {\n \"searchresult\": result\n }\n\n return JsonResponse(template_var)", "def shot_lon_lat(self) -> list[tuple[float, float]]:\n if self._shot_lon_lat is None:\n if self.parent_granule.product == \"GEDI_L2A\":\n self._shot_lon_lat = list(\n zip(self[\"lon_lowestmode\"], self[\"lat_lowestmode\"])\n )\n elif self.parent_granule.product == \"GEDI_L1B\":\n self._shot_lon_lat = list(\n zip(\n self[\"geolocation/longitude_lastbin\"],\n self[\"geolocation/latitude_lastbin\"],\n )\n )\n else:\n raise NotImplementedError(\n \"No method to get main data for \"\n f\"product {self.parent_granule.product}\"\n )\n return self._shot_lon_lat", "def test_query_google(self):\n google_api = LocationData()\n latLng = google_api.getLatLong(test_address)\n self.assertEqual(latLng['lat'], 32.625849)", "def find_5near_hospitals(lon, lat):\r\n engine = get_sql_engine()\r\n hospital5 = text(\r\n \"\"\"\r\n SELECT\r\n \"HOSPITAL_NAME\" AS name, \"STREET_ADDRESS\" as address,\r\n \"PHONE_NUMBER\" as contact, geom,\r\n\t ST_X(geom) AS lon, ST_Y(geom) AS lat,\r\n\t ST_Distance(ST_SetSRID(ST_MakePoint(:lon, :lat), 4326)::geography, geom::geography) AS distance\r\n FROM philly_hospital\r\n ORDER BY 7 ASC\r\n LIMIT 5\r\n \"\"\"\r\n )\r\n near_hospital = gpd.read_postgis(hospital5, con=engine, params={\"lon\": lon, \"lat\": lat})\r\n return near_hospital", "def get_map_locs(self, CalSwimView):\n # Initialize query list\n query_build = []\n \n if (CalSwimView.lat and CalSwimView.lng): \n # Search query has a specified location thus check against intersection of points and polygons in 
database\n self.cursor.execute(\"SET @center = GeomFromText('POINT(%s %s)');\",(float(CalSwimView.lat), float(CalSwimView.lng)))\n self.cursor.execute(\"SET @radius = %s;\",(CalSwimView.radius))\n self.cursor.execute(\"\"\"\n SET @bbox = CONCAT('POLYGON((',\n X(@center) - @radius, ' ', Y(@center) - @radius, ',',\n X(@center) + @radius, ' ', Y(@center) - @radius, ',',\n X(@center) + @radius, ' ', Y(@center) + @radius, ',',\n X(@center) - @radius, ' ', Y(@center) + @radius, ',',\n X(@center) - @radius, ' ', Y(@center) - @radius, '))'\n );\n \"\"\")\n query_build.append(\"\"\"\n SELECT gd_id, organization, project_name_short, project_name, project_description, data_type, data_target, AsText(location)\n FROM GeoData\n WHERE Intersects( location, GeomFromText(@bbox) )\n AND\n CASE geometrytype(location)\n WHEN 'POINT' THEN\n SQRT(POW( ABS( X(location) - X(@center)), 2) + POW( ABS(Y(location) - Y(@center)), 2 )) < @radius\n ELSE\n TRUE\n END\n \"\"\")\n # Search query has at least 1 keyword\n if len(CalSwimView.keywords) > 0:\n # Just a few MySQL notes:\n # Default MySQL operation executes an \"OR\" search among terms\n # To make sure all terms are in a given result, \"AND\" search among terms, then just add prefix \"+\" before each term\n # To exclude results with a given term, just add prefix \"-\" before the term\n keyword_query = \"*, \".join(CalSwimView.keywords) +\"*\" \n query_build.append(\"\"\" \n AND\n MATCH (organization, contact, project_name, project_description, project_funder, data_target, location_description, data_collector, data_type, keyword, other)\n AGAINST ('%(KeywordQuery)s' IN BOOLEAN MODE)\n \"\"\" % {\"KeywordQuery\":keyword_query})\n else:\n # Search query does not have a specified location\n query_build.append(\"\"\"\n SELECT gd_id, organization, project_name_short, project_name, project_description, data_type, data_target, AsText(location)\n FROM GeoData\n \"\"\")\n # Search query has at least 1 keyword\n if len(CalSwimView.keywords) > 0:\n # Just a few MySQL notes:\n # Default MySQL operation executes an \"OR\" search among terms\n # To make sure all terms are in a given result, \"AND\" search among terms, then just add prefix \"+\" before each term\n # To exclude results with a given term, just add prefix \"-\" before the term\n keyword_query = \"*, \".join(CalSwimView.keywords) +\"*\" \n query_build.append(\"\"\" \n WHERE\n MATCH (organization, contact, project_name, project_description, project_funder, data_target, location_description, data_collector, data_type, keyword, other)\n AGAINST ('%(KeywordQuery)s' IN BOOLEAN MODE)\n \"\"\" % {\"KeywordQuery\":keyword_query})\n select_query = \"\\n\".join(query_build)\n #print >> CalSwimView.errors, select_query\n \n # execute SQL query using execute() method.\n self.cursor.execute(select_query)\n\n # Fetch a single row using fetchone() method.\n rows = [] \n table_data = {}\n coordinates = []\n while(1):\n row=self.cursor.fetchone()\n if row == None:\n break \n coordinates.append( str(row[7]).replace('POINT(','').replace('POLYGON((','').replace(')','') )\n rows.append( {\"c\":[{\"v\":row[0]}, {\"v\":row[1]}, {\"v\":row[2]}, {\"v\":row[3]}, {\"v\":row[4]}, {\"v\":row[5]}, {\"v\":row[6]}]} )\n \n # Return search values as json\n cols = [{\"id\":'gd_id', \"label\":'gd_id', \"type\":'string'},\n {\"id\":'organization', \"label\":'Organization', \"type\":'string'},\n {\"id\":'project_short', \"label\":'Project Short', \"type\":'string'},\n {\"id\":'project', \"label\":'Project', \"type\":'string'},\n {\"id\":'description', 
\"label\":'Description', \"type\":'string'}, \n {\"id\":'target', \"label\":'Target', \"type\":'string'}]\n table_data[\"cols\"] = cols\n table_data[\"rows\"] = rows\n # Assign table data to json table data container\n json_data = {}\n json_data[\"table_data\"] = table_data\n json_data[\"coordinates\"] = coordinates\n \n # Close DB connections \n self.cursor.close()\n \n # Return results\n return json.dumps(json_data)", "def lat_lons(self):", "def _update_home_information(self, homes):\n\n if homes is not None and len(homes) > 0:\n self._home = homes[0]\n self.has_home = True\n self._update_horizon(max(abs(self._home[0]), abs(self._home[1])))\n if self.experimental_home is None:\n self.experimental_home = self._home\n else:\n if self.experimental_home not in self.last_scan['Home']:\n print self, self.experimental_home, \"is not in\", self.last_scan['Home']\n self.experimental_home = self._home\n else:\n self._home = self.experimental_home # Try some reckoning\n\n return", "def location(bot, update):\n\n bot.send_message(chat_id=update.message.chat_id, text=\"OK you wait ah...\")\n latitude = update.message.location.latitude\n longitude = update.message.location.longitude\n bot.send_message(chat_id=update.message.chat_id, text=\"Just let you know for fun lol - your latitude is {0}, and your longitude is {1}\".format(latitude,longitude))\n try:\n # Read carpark csv as dataframe\n df = pd.read_csv('Parking_withcoords.csv')\n \n # Calculate distance between each carpark and postal code and append it to dataframe\n distance = []\n for coord in df['Coord_rad']: \n carpark = haversine((radians(latitude),radians(longitude)), ast.literal_eval(coord)) #converts string to tuple\n distance.append(carpark)\n df['Distance_km'] = distance\n\n # Sort in ascending order and extract top 5\n top_five = df.sort_values('Distance_km').head(5)\n\n for row in top_five['Info']:\n bot.send_message(chat_id=update.message.chat_id, parse_mode='HTML', text=row.replace(\"\\$\", \"$\"))\n\n bot.send_message(chat_id=update.message.chat_id, text=\"Fast hor! If you want to check other places, type /start again ok :P\")\n except:\n bot.send_message(chat_id=update.message.chat_id, text=\"Jialat liao got error...try again with /start and then use the postal code method can? 
Paiseh!\")", "def locate(self):\n \n #CONNECT TO API\n api = GoogleV3(api_key = self.google_key)\n\n #INITALIZE ARRAY\n array = []\n\n #START GEOCODING ADDRESSES\n for i in tqdm(range(len(self.df)), desc='Geocoding Addresses'):\n\n \n row = self.df.iloc[i]\n\n #GET ADDRESS VARIABLES\n st_name = row['street_name']\n st_number = row['house_number']\n city = row['city']\n state = row['state/province']\n listing_number = row['listing_number']\n zip = row['postal_code']\n\n\n #FORMAT ADDRESS FOR API\n full_address = str(\"{} {},{},{},{}\".format(st_number, st_name, city, state, zip))\n\n #TRY TO LOCATE WITH GOOGLE\n try:\n \n location = api.geocode(full_address, timeout=10)\n\n lat = location.latitude\n lon = location.longitude\n \n\n info = [lat,lon, listing_number]\n\n array.append(info)\n\n next \n\n #Go to next if you cant locate\n except:\n\n info = [0,0, listing_number]\n\n array.append(info)\n\n next\n\n #CONVERT SERIES TO DATAFRAME\n geo_data = pd.DataFrame(data = array, columns = ['lat', 'lon', 'listing_number'])\n \n #INNER JOIN DATA TO DATAFRAME\n self.df = pd.merge(self.df, geo_data, on= 'listing_number', how = 'inner')", "def geo(self):\n return self.query.geo", "def get_location_by_id(self, location_id):", "def set_home_locations(self):\n self.swarmie.set_home_gps_location(self.swarmie.get_gps_location())\n\n current_location = self.swarmie.get_odom_location()\n current_pose = current_location.get_pose()\n home_odom = Location(current_location.Odometry)\n\n detections = self.swarmie.get_latest_targets().detections\n try:\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_odom(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, yaw = tf.transformations.euler_from_quaternion(\n quat\n )\n yaw += math.pi / 2\n\n home_odom.Odometry.pose.pose.position.x = float(\n home_detection.pose.position.x + 0.5 * math.cos(yaw)\n )\n home_odom.Odometry.pose.pose.position.y = float(\n home_detection.pose.position.y + 0.5 * math.sin(yaw)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return\n\n except tf.Exception:\n pass # use backup below\n\n # project home_odom location 50cm in front of rover's current location\n home_odom.Odometry.pose.pose.position.x = (\n current_pose.x + 0.5 * math.cos(current_pose.theta)\n )\n home_odom.Odometry.pose.pose.position.y = (\n current_pose.y + 0.5 * math.sin(current_pose.theta)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return", "def test_fetchlocation(self):\n result = export.processExport(houseId=1,\n locationIds = [1,],\n )\n\n self.assertEqual(result.shape, (2880, 1))\n self.assertEqual(result.columns[0], LOC1)\n\n result = export.processExport(houseId=1,\n locationIds = [2,],\n )\n\n self.assertEqual(result.shape, (2880, 1))\n self.assertEqual(result.columns[0], LOC2)" ]
[ "0.76033956", "0.6118254", "0.5742464", "0.57084286", "0.57080907", "0.5699466", "0.551681", "0.55092907", "0.54990584", "0.5489118", "0.5447527", "0.54141396", "0.54135525", "0.5385749", "0.52658933", "0.526368", "0.52612764", "0.5260204", "0.5239613", "0.5237394", "0.5224577", "0.5209894", "0.52051365", "0.51983416", "0.51951915", "0.517565", "0.5161173", "0.51505756", "0.5148553", "0.511501" ]
0.7990343
0