Dataset fields:
  query            string    (length 9 to 3.4k)
  document         string    (length 9 to 87.4k)
  metadata         dict
  negatives        sequence  (length 4 to 101)
  negative_scores  sequence  (length 4 to 101)
  document_score   string    (length 3 to 10)
  document_rank    string    (102 distinct values)
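For orientation, a minimal sketch of loading a dataset with this schema via the Hugging Face datasets library. The repository path is a placeholder assumption, not the dataset's real name:

from datasets import load_dataset

# Hypothetical repository path -- substitute the real dataset name.
ds = load_dataset("user/code-retrieval-triplets", split="train")

row = ds[0]
print(row["query"])           # natural-language description of the code
print(row["document"][:80])   # the matching (positive) code snippet
print(len(row["negatives"]))  # 4 to 101 hard negatives per row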
Merges data from a single transaction. Snapshot is an instance of StatsEngine that contains stats for the single transaction.
def merge(self, snapshot):
    if not self.__settings:
        return

    self.merge_metric_stats(snapshot)
    self._merge_transaction_events(snapshot)
    self._merge_synthetics_events(snapshot)
    self._merge_error_events(snapshot)
    self._merge_error_traces(snapshot)
    self._merge_custom_events(snapshot)
    self._merge_span_events(snapshot)
    self._merge_sql(snapshot)
    self._merge_traces(snapshot)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def merge_metric_stats(self, snapshot):\n\n if not self.__settings:\n return\n\n for key, other in six.iteritems(snapshot.__stats_table):\n stats = self.__stats_table.get(key)\n if not stats:\n self.__stats_table[key] = other\n else:\n stats.merge_stats(other)", "def record_transaction(self, transaction):\n\n if not self.__settings:\n return\n\n settings = self.__settings\n\n # Record the apdex, value and time metrics generated from the\n # transaction. Whether time metrics are reported as distinct\n # metrics or into a rollup is in part controlled via settings\n # for minimum number of unique metrics to be reported and thence\n # whether over a time threshold calculated as percentage of\n # overall request time, up to a maximum number of unique\n # metrics. This is intended to limit how many metrics are\n # reported for each transaction and try and cut down on an\n # explosion of unique metric names. The limits and thresholds\n # are applied after the metrics are reverse sorted based on\n # exclusive times for each metric. This ensures that the metrics\n # with greatest exclusive time are retained over those with\n # lesser time. Such metrics get reported into the performance\n # breakdown tab for specific web transactions.\n\n self.record_apdex_metrics(transaction.apdex_metrics(self))\n\n self.merge_custom_metrics(transaction.custom_metrics.metrics())\n\n self.record_time_metrics(transaction.time_metrics(self))\n\n # Capture any errors if error collection is enabled.\n # Only retain maximum number allowed per harvest.\n\n error_collector = settings.error_collector\n\n if (error_collector.enabled and settings.collect_errors and\n len(self.__transaction_errors) <\n settings.agent_limits.errors_per_harvest):\n self.__transaction_errors.extend(transaction.error_details())\n\n self.__transaction_errors = self.__transaction_errors[:\n settings.agent_limits.errors_per_harvest]\n\n if (error_collector.capture_events and\n error_collector.enabled and\n settings.collect_error_events):\n events = transaction.error_events(self.__stats_table)\n for event in events:\n self._error_events.add(event, priority=transaction.priority)\n\n # Capture any sql traces if transaction tracer enabled.\n\n if settings.slow_sql.enabled and settings.collect_traces:\n for node in transaction.slow_sql_nodes(self):\n self.record_slow_sql_node(node)\n\n # Remember as slowest transaction if transaction tracer\n # is enabled, it is over the threshold and slower than\n # any existing transaction seen for this period and in\n # the historical snapshot of slow transactions, plus\n # recording of transaction trace for this transaction\n # has not been suppressed.\n\n transaction_tracer = settings.transaction_tracer\n\n if (not transaction.suppress_transaction_trace and\n transaction_tracer.enabled and settings.collect_traces):\n\n # Transactions saved for Synthetics transactions\n # do not depend on the transaction threshold.\n\n self._update_synthetics_transaction(transaction)\n\n threshold = transaction_tracer.transaction_threshold\n\n if threshold is None:\n threshold = transaction.apdex_t * 4\n\n if transaction.duration >= threshold:\n self._update_slow_transaction(transaction)\n\n # Create the transaction event and add it to the\n # appropriate \"bucket.\" Synthetic requests are saved in one,\n # while transactions from regular requests are saved in another.\n\n if transaction.synthetics_resource_id:\n event = transaction.transaction_event(self.__stats_table)\n self._synthetics_events.add(event)\n\n elif 
(settings.collect_analytics_events and\n settings.transaction_events.enabled):\n\n event = transaction.transaction_event(self.__stats_table)\n self._transaction_events.add(event, priority=transaction.priority)\n\n # Merge in custom events\n\n if (settings.collect_custom_events and\n settings.custom_insights_events.enabled):\n self.custom_events.merge(transaction.custom_events)\n\n # Merge in span events\n\n if (settings.distributed_tracing.enabled and\n settings.span_events.enabled and settings.collect_span_events):\n if settings.infinite_tracing.enabled:\n for event in transaction.span_protos(settings):\n self._span_stream.put(event)\n elif transaction.sampled:\n for event in transaction.span_events(self.__settings):\n self._span_events.add(event, priority=transaction.priority)", "def snapshot(self) -> Snapshot:\n snapshot = self.open(Snapshot.type).signed\n if not isinstance(snapshot, Snapshot):\n raise RuntimeError(\"Unexpected snapshot type\")\n return snapshot", "def transaction(self, transaction):\n # Allow for a list of blocks..\n transaction = utils.request_type(transaction)\n\n res = r.get(self.url + self.tx_info + str(transaction))\n return self.execute(res)", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.gain = self.snapshot[0]\r\n self.block = self.snapshot[1]\r\n self.locked = self.snapshot[2]\r\n self.bucket_num = self.snapshot[3]", "def apply_transaction(self,\n header: BlockHeader,\n transaction: BaseTransaction\n ) -> Tuple[BlockHeader, Receipt, BaseComputation]:\n processed_tx = self.process_transaction(header.shard_id, transaction)\n return super().apply_transaction(header, processed_tx)", "def _merge_report(self, target, new):\n time = None\n if 'ts' in new['parsed']:\n time = new['parsed']['ts']\n\n if (target.get('lastSeenDate', None) and\n time and\n target['lastSeenDate'] < time):\n target['lastSeenDate'] = time\n\n query_millis = int(new['parsed']['stats']['millis'])\n target['stats']['totalTimeMillis'] += query_millis\n target['stats']['count'] += 1\n target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']", "def _merge_report(self, target, new):\r\n time = None\r\n if 'ts' in new['parsed']:\r\n time = new['parsed']['ts']\r\n\r\n if (target.get('lastSeenDate', None) and\r\n time and\r\n target['lastSeenDate'] < time):\r\n target['lastSeenDate'] = time\r\n\r\n query_millis = int(new['parsed']['stats']['millis'])\r\n target['stats']['totalTimeMillis'] += query_millis\r\n target['stats']['count'] += 1\r\n target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']", "def rollback(self, snapshot):\n\n if not self.__settings:\n return\n\n _logger.debug('Performing rollback of data into '\n 'subsequent harvest period. 
Metric data and transaction events'\n 'will be preserved and rolled into next harvest')\n\n self.merge_metric_stats(snapshot)\n self._merge_transaction_events(snapshot, rollback=True)\n self._merge_synthetics_events(snapshot, rollback=True)\n self._merge_error_events(snapshot)\n self._merge_custom_events(snapshot, rollback=True)\n self._merge_span_events(snapshot, rollback=True)", "def take_snapshot(self):\r\n self.snapshot = self.gain, self.block, self.locked, self.bucket_num", "def snapshot(self, snapshot):\n self._context[\"snapshot\"] = snapshot", "def merge(self, dataset):\n def merge_data(source, dest):\n for key, value in source.items():\n if isinstance(value, dict):\n merge_data(value, dest.setdefault(key, {}))\n else:\n dest[key] = value\n return dest\n\n merge_data(dataset.data, self._data)\n\n for h in dataset.task_history:\n if h not in self._task_history:\n self._task_history.append(h)", "def restore(self, snapshot):\n self.unit_name = snapshot[\"unit_name\"]", "def get_snapshot(self):\n data = {\n \"t\": self.sim.t,\n \"time\": self.time,\n \"vehicles\": self.sim.vehicles,\n \"stations\": self.sim.stations,\n \"state\": self.state,\n \"done\": self.is_done}\n return copy.deepcopy(data)", "def populate_from_transaction(cls, transaction):\n # type: (Transaction) -> Baggage\n hub = transaction.hub or sentry_sdk.Hub.current\n client = hub.client\n sentry_items = {} # type: Dict[str, str]\n\n if not client:\n return Baggage(sentry_items)\n\n options = client.options or {}\n user = (hub.scope and hub.scope._user) or {}\n\n sentry_items[\"trace_id\"] = transaction.trace_id\n\n if options.get(\"environment\"):\n sentry_items[\"environment\"] = options[\"environment\"]\n\n if options.get(\"release\"):\n sentry_items[\"release\"] = options[\"release\"]\n\n if options.get(\"dsn\"):\n sentry_items[\"public_key\"] = Dsn(options[\"dsn\"]).public_key\n\n if (\n transaction.name\n and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES\n ):\n sentry_items[\"transaction\"] = transaction.name\n\n if user.get(\"segment\"):\n sentry_items[\"user_segment\"] = user[\"segment\"]\n\n if transaction.sample_rate is not None:\n sentry_items[\"sample_rate\"] = str(transaction.sample_rate)\n\n # there's an existing baggage but it was mutable,\n # which is why we are creating this new baggage.\n # However, if by chance the user put some sentry items in there, give them precedence.\n if transaction._baggage and transaction._baggage.sentry_items:\n sentry_items.update(transaction._baggage.sentry_items)\n\n return Baggage(sentry_items, mutable=False)", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.max_gain = self.snapshot[0]\r\n self.array = self.snapshot[1]\r\n self.free_cell_list = self.snapshot[2]", "def Merge(self, other):\n\n # Logging just in case\n self.db.ExecuteSql('insert into events(timestamp, track_id, event, '\n 'details) values (now(), %d, \"merge: before\", %s);'\n %(self.persistant['id'],\n sql.FormatSqlValue('details',\n repr(self.persistant))))\n self.db.ExecuteSql('insert into events(timestamp, track_id, event, '\n 'details) values (now(), %d, \"merge: deleted\", %s);'\n %(other.persistant['id'], \n sql.FormatSqlValue('details',\n repr(other.persistant))))\n\n # Fields which can be summed\n for f in ['plays', 'skips']:\n self.persistant[f] = (self.persistant.get(f, 0) +\n other.persistant.get(f, 0))\n\n # Date fields where we take the newest\n for f in ['last_played', 'last_skipped', 'last_action']:\n a = self.persistant.get(f, datetime.datetime(1970, 1, 
1))\n b = other.persistant.get(f, datetime.datetime(1970, 1, 1))\n if a > b:\n v = a\n else:\n v = b\n if v != datetime.datetime(1970, 1, 1):\n self.persistant[f] = v\n\n # Date fields where we take the oldest\n for f in ['creation_time']:\n a = self.persistant.get(f, datetime.datetime(1970, 1, 1))\n b = other.persistant.get(f, datetime.datetime(1970, 1, 1))\n if a < b:\n v = a\n else:\n v = b\n if v != datetime.datetime(1970, 1, 1):\n self.persistant[f] = v\n\n # Fields where we only clobber ours if we don't have a value\n for f in ['artist', 'album', 'song']:\n if not self.persistant.has_key(f) or not self.persistant[f]:\n self.persistant[f] = other.persistant.get(f, None)\n\n # Sometimes the number is a placeholder\n if self.persistant.has_key('number') and self.persistant['number'] == -1:\n self.persistant['number'] = other.persistant.get('number', -1)\n if not self.persistant.has_key('number'):\n self.persistant['number'] = other.persistant.get('number', -1)\n\n # Update the id in the tags table\n tags = self.db.GetRows('select tag from tags where track_id=%d;'\n % other.persistant['id'])\n self.db.ExecuteSql('insert into events(timestamp, track_id, event, '\n 'details) values (now(), %d, \"merge: tags: %d\", %s);'\n %(self.persistant['id'], other.persistant['id'],\n sql.FormatSqlValue('details', repr(tags))))\n\n try:\n self.db.ExecuteSql('update tags set track_id=%d where track_id=%d;'\n %(self.persistant['id'], other.persistant['id']))\n self.db.ExecuteSql('commit;')\n except:\n # This can happen if the is already a matching tag for the first track\n pass\n\n # Update the id in the paths table\n paths = self.db.GetRows('select path from paths where track_id=%d;'\n % other.persistant['id'])\n self.db.ExecuteSql('insert into events(timestamp, track_id, event, '\n 'details) values (now(), %d, \"merge: paths: %d\", %s);'\n %(self.persistant['id'], other.persistant['id'],\n sql.FormatSqlValue('details', repr(paths))))\n \n self.db.ExecuteSql('update paths set track_id=%d where track_id=%d;'\n %(self.persistant['id'], other.persistant['id']))\n self.db.ExecuteSql('commit;')\n\n self.db.ExecuteSql('insert into events(timestamp, track_id, event, '\n 'details) values (now(), %d, \"merge: after\", %s);'\n %(self.persistant['id'],\n sql.FormatSqlValue('details',\n repr(self.persistant))))\n self.db.ExecuteSql('commit;')", "def test_merge_sum(self):\n ars = self.ar[2009][11]['general']\n ars2 = awstats_reader.AwstatsReader(test_file_dir,\n 'joshuakugler.com')[2009][11]['general']\n self.assertEqual(ars.merge(ars2, 'LastUpdate', 'parsed'), 1262637)", "def copyTransactionsFrom(self, other, verbose=0):\n ZODB.BaseStorage.copy(other, self, verbose)", "def serialize_snapshot(self, snapshot, fields=None, version=None):\n fields = fields or self.snapshot_fields\n version = version or self.snapshot_version\n serialized_snapshot = serializers.serialize(\n 'python', [snapshot], fields=fields\n )[0]\n serialized_snapshot['version'] = version\n serialized_snapshot['extra_fields'] = {}\n return serialized_snapshot", "def merge(*args):\n return _libsbml.Unit_merge(*args)", "def add(self, transaction):\n if isinstance(transaction, Transaction):\n # If the transaction already exists\n if(transaction.hash in self.transaction_index):\n print(\"Debug: The transaction already exists in the list\")\n return None\n\n self.transaction_list.append(transaction)\n size = len(self.transaction_list)-1\n self.transaction_index[transaction.hash] = size\n else:\n raise Exception(\"Error: not a transaction\")", "def 
get_transaction_data():\n data = parse_json()\n income_instances = create_transactions(data['incomes'])\n expense_instances = create_transactions(data['expenses'])\n for expense in expense_instances:\n expense.amount = -(expense.amount)\n transactions = income_instances + expense_instances\n return transactions", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.name = self.snapshot[0]\r\n self.size = self.snapshot[1]\r\n self.cells = self.snapshot[2]\r\n self.bucket_array.load_snapshot()", "def get_merged_data(self):\n return self._combinedata", "def snapshot(self, snapshot_id):\r\n return self.connection.create_dbsnapshot(snapshot_id, self.id)", "def snapshot(self):\n return {\"unit_name\": self.unit_name}", "def _merge(self, other: dict):\n self._storage = dict_merge(self._storage, other)", "def take_snapshot():\n df = scrape()\n for i in df.index:\n single = df.loc[i]\n # create or get locations\n loc, created = Location.objects.get_or_create(\n name=single['Location'],\n all_stands=single['Stands'],\n coordinates=single['Coords']\n )\n # add a new snapshot\n obj = Snapshot(\n location=loc,\n avail_bikes=single['Bikes'],\n free_stands=single['Free stands'],\n timestamp=datetime.now(tz=timezone('Europe/Warsaw'))\n )\n obj.save()", "def snapshot(snapshot_type, result_q, time_delta):", "def load(self):\n def load_transaction_info(info):\n description = info.description\n payee = info.payee\n checkno = info.checkno\n date = info.date\n transaction = Transaction(date, description, payee, checkno)\n transaction.notes = nonone(info.notes, '')\n for split_info in info.splits:\n account = split_info.account\n amount = split_info.amount\n if split_info.amount_reversed:\n amount = -amount\n memo = nonone(split_info.memo, '')\n split = Split(transaction, account, amount)\n split.memo = memo\n if split_info.reconciliation_date is not None:\n split.reconciliation_date = split_info.reconciliation_date \n elif split_info.reconciled: # legacy\n split.reconciliation_date = transaction.date\n split.reference = split_info.reference\n transaction.splits.append(split)\n while len(transaction.splits) < 2:\n transaction.splits.append(Split(transaction, None, 0))\n transaction.balance()\n transaction.mtime = info.mtime\n if info.reference is not None:\n for split in transaction.splits:\n if split.reference is None:\n split.reference = info.reference\n return transaction\n \n self._load()\n self.flush_account() # Implicit\n # Now, we take the info we have and transform it into model instances\n currencies = set()\n start_date = datetime.date.max\n for info in self.group_infos:\n group = Group(info.name, info.type)\n self.groups.append(group)\n for info in self.account_infos:\n account_type = info.type\n if account_type not in AccountType.All:\n account_type = AccountType.Asset\n account_currency = self.default_currency\n try:\n if info.currency:\n account_currency = Currency(info.currency)\n except ValueError:\n pass # keep account_currency as self.default_currency\n account = Account(info.name, account_currency, account_type)\n if info.group:\n account.group = self.groups.find(info.group, account_type)\n if info.budget:\n self.budget_infos.append(BudgetInfo(info.name, info.budget_target, info.budget))\n account.reference = info.reference\n account.account_number = info.account_number\n account.notes = info.notes\n currencies.add(account.currency)\n self.accounts.add(account)\n \n # Pre-parse transaction info. 
We bring all relevant info recorded at the txn level into the split level\n all_txn = self.transaction_infos + [r.transaction_info for r in self.recurrence_infos] +\\\n flatten([stripfalse(r.date2exception.values()) for r in self.recurrence_infos]) +\\\n flatten([r.date2globalchange.values() for r in self.recurrence_infos])\n for info in all_txn:\n split_accounts = [s.account for s in info.splits]\n if info.account and info.account not in split_accounts:\n info.splits.insert(0, SplitInfo(info.account, info.amount, info.currency, False))\n if info.transfer and info.transfer not in split_accounts:\n info.splits.append(SplitInfo(info.transfer, info.amount, info.currency, True))\n for split_info in info.splits:\n # this amount is just to determine the auto_create_type\n str_amount = split_info.amount\n if split_info.currency:\n str_amount += split_info.currency\n amount = self.parse_amount(str_amount, self.default_currency)\n auto_create_type = AccountType.Income if amount >= 0 else AccountType.Expense\n split_info.account = self.accounts.find(split_info.account, auto_create_type) if split_info.account else None\n currency = split_info.account.currency if split_info.account is not None else self.default_currency\n split_info.amount = self.parse_amount(str_amount, currency)\n if split_info.amount:\n currencies.add(split_info.amount.currency)\n \n self.transaction_infos.sort(key=attrgetter('date'))\n for date, transaction_infos in groupby(self.transaction_infos, attrgetter('date')):\n start_date = min(start_date, date)\n for position, info in enumerate(transaction_infos, start=1):\n transaction = load_transaction_info(info)\n self.transactions.add(transaction, position=position)\n \n # Scheduled\n for info in self.recurrence_infos:\n ref = load_transaction_info(info.transaction_info)\n recurrence = Recurrence(ref, info.repeat_type, info.repeat_every)\n recurrence.stop_date = info.stop_date\n for date, transaction_info in info.date2exception.items():\n if transaction_info is not None:\n exception = load_transaction_info(transaction_info)\n spawn = Spawn(recurrence, exception, date, exception.date)\n recurrence.date2exception[date] = spawn\n else:\n recurrence.delete_at(date)\n for date, transaction_info in info.date2globalchange.items():\n change = load_transaction_info(transaction_info)\n spawn = Spawn(recurrence, change, date, change.date)\n recurrence.date2globalchange[date] = spawn\n self.schedules.append(recurrence)\n # Budgets\n TODAY = datetime.date.today()\n fallback_start_date = datetime.date(TODAY.year, TODAY.month, 1)\n for info in self.budget_infos:\n account = self.accounts.find(info.account)\n if account is None:\n continue\n target = self.accounts.find(info.target) if info.target else None\n amount = self.parse_amount(info.amount, account.currency)\n start_date = nonone(info.start_date, fallback_start_date)\n budget = Budget(account, target, amount, start_date, repeat_type=info.repeat_type)\n budget.notes = nonone(info.notes, '')\n budget.stop_date = info.stop_date\n if info.repeat_every:\n budget.repeat_every = info.repeat_every\n self.budgets.append(budget)\n self._post_load()\n self.oven.cook(datetime.date.min, until_date=None)\n Currency.get_rates_db().ensure_rates(start_date, [x.code for x in currencies])", "def copy(self, other):\n assert isinstance(other, Snapshot)\n self.imp = other.imp", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.blockA = self.snapshot[0]\r\n self.blockB = self.snapshot[1]\r\n self.blockA_locked = self.snapshot[2]\r\n 
self.blockB_locked = self.snapshot[3]\r\n self.blockA_free = self.snapshot[4]\r\n self.blockB_free = self.snapshot[5]\r\n self.blockA_cells = self.snapshot[6]\r\n self.blockB_cells = self.snapshot[7]\r\n self.cut = self.snapshot[8]", "def snapshot(self):\n return (self.block_header.state_root, self.chaindb.snapshot())", "def snapshot_metadata(self):\n return self._snapshot_metadata", "def data_snapshot(self) -> Dict[str, Any]:\n self.__logger.debug('Eva.data_snapshot called')\n return self.__http_client.data_snapshot()", "def transaction(self):\n copy = self.copy()\n try:\n yield copy\n except TransactionRollback:\n del copy\n else:\n self.update(copy)", "def raw_transaction(self) -> CustomRawTransaction:\n enforce(self.is_set(\"raw_transaction\"), \"'raw_transaction' content is not set.\")\n return cast(CustomRawTransaction, self.get(\"raw_transaction\"))", "def transactions(self):\n return copy.deepcopy(self._transactions)", "def snapshot(self):\n pass", "def process_transaction(self, transaction):\n instrument = transaction.instrument\n if isinstance(instrument, Future):\n try:\n old_price = self._payout_last_sale_prices[instrument]\n except KeyError:\n self._payout_last_sale_prices[instrument] = transaction.price\n else:\n position = self.position_tracker.positions[instrument]\n amount = position.amount\n price = transaction.price\n\n self._cash_flow(\n self._calculate_payout(\n instrument.multiplier,\n amount,\n old_price,\n price,\n ),\n )\n\n if amount + transaction.amount == 0:\n del self._payout_last_sale_prices[instrument]\n else:\n self._payout_last_sale_prices[instrument] = price\n else:\n self._cash_flow(-(transaction.price * transaction.amount))\n\n self.position_tracker.execute_transaction(transaction)\n\n # we only ever want the dict form from now on\n transaction_dict = transaction.to_dict()\n try:\n self._processed_transactions[transaction.dt].append(\n transaction_dict,\n )\n except KeyError:\n self._processed_transactions[transaction.dt] = [transaction_dict]", "def AddSnapshot(self, price, date=datetime.datetime.now(), dividend=0., annualDividend=0.):\n self._AddSnapshot(self.Snapshot(price=price, date=date, dividend=dividend, annualDividend=annualDividend))", "def archive_ost_data(self, lmtdb):\n\n dataset_names = [\n 'datatargets/readbytes',\n 'datatargets/writebytes',\n 'fullness/bytes',\n 'fullness/bytestotal',\n 'fullness/inodes',\n 'fullness/inodestotal'\n ]\n\n self.init_datasets(dataset_names, lmtdb.ost_names)\n\n # Now query the OST_DATA table to get byte counts over the query time range\n results, columns = lmtdb.get_ost_data(self.query_start, self.query_end_plusplus)\n\n # Index the columns to speed up insertion of data\n col_map = {}\n try:\n for db_col in ['TIMESTAMP', 'OST_ID', 'READ_BYTES',\n 'WRITE_BYTES', 'KBYTES_USED', 'KBYTES_FREE',\n 'INODES_USED', 'INODES_FREE']:\n col_map[db_col] = columns.index(db_col)\n except ValueError:\n raise ValueError(\"LMT database schema does not match expectation\")\n\n # Loop through all the results of the timeseries query\n for row in results:\n if isstr(row[col_map['TIMESTAMP']]):\n # SQLite stores timestamps as a unicode string\n timestamp = datetime.datetime.strptime(row[col_map['TIMESTAMP']],\n \"%Y-%m-%d %H:%M:%S\")\n else:\n # MySQL timestamps are automatically converted to datetime.datetime\n timestamp = row[col_map['TIMESTAMP']]\n target_name = lmtdb.ost_id_map[row[col_map['OST_ID']]]\n for dataset_name in dataset_names:\n target_dbcol = self.config[dataset_name].get('column')\n if target_dbcol is not None:\n 
self[dataset_name].insert_element(\n timestamp,\n target_name,\n row[col_map[target_dbcol]])\n elif dataset_name == 'fullness/bytestotal':\n self[dataset_name].insert_element(\n timestamp,\n target_name,\n row[col_map['KBYTES_USED']] + row[col_map['KBYTES_FREE']])\n elif dataset_name == 'fullness/inodestotal':\n self[dataset_name].insert_element(\n timestamp,\n target_name,\n row[col_map['INODES_USED']] + row[col_map['INODES_FREE']])\n else:\n errmsg = \"%s in self.config but missing 'column' setting\" % dataset_name\n raise KeyError(errmsg)", "def snapshot(self, agent_memory):\n\n read_cmd = \"SELECT \"\n for r in self.TABLE_COLUMNS:\n read_cmd += r + \", \"\n read_cmd = read_cmd.strip(\", \")\n read_cmd += \" FROM \" + self.TABLE + \" WHERE uuid=?\"\n data = agent_memory._db_read_one(read_cmd, self.memid)\n if not data:\n raise (\"tried to snapshot nonexistent memory\")\n\n archive_memid = self.new(agent_memory, snapshot=True)\n new_data = list(data)\n new_data[0] = archive_memid\n\n if hasattr(self, \"ARCHIVE_TABLE\"):\n archive_table = self.ARCHIVE_TABLE\n else:\n archive_table = self.TABLE\n write_cmd = \"INSERT INTO \" + archive_table + \"(\"\n qs = \"\"\n for r in self.TABLE_COLUMNS:\n write_cmd += r + \", \"\n qs += \"?, \"\n write_cmd = write_cmd.strip(\", \")\n write_cmd += \") VALUES (\" + qs.strip(\", \") + \")\"\n agent_memory._db_write(write_cmd, *new_data)\n link_archive_to_mem(agent_memory, self.memid, archive_memid)", "def market_snap(self, timestamp = -1):\n # refresh account states\n self.trading_acc1.sync_account_with_exh()\n self.trading_acc2.sync_account_with_exh()\n\n signal__arbitrage_delta = self.get_signal__arbitrage_delta()\n signal__gdax_has_usd = self.get_signal__gdax_has_usd()\n signal__gdax_has_eth = self.get_signal__gdax_has_eth()\n signal__cex_has_eth = self.get_signal__cex_has_eth()\n\n def mk_audit_js():\n gdax_account = self.trading_acc1.get_account()\n cex_account = self.trading_acc2.get_account()\n transaction_t = epoch.current_milli_time() if timestamp == -1 else timestamp\n\n audit_js = OrderedDict()\n audit_js['strategy_run_id'] = self.run_id\n audit_js['timestamp'] = epoch.to_str(transaction_t)\n audit_js['timestamp__long'] = transaction_t\n audit_js['ticker'] = self.ticker\n audit_js['strategy_info'] = self._strategy_info\n\n audit_js['signal'] = OrderedDict()\n audit_js['signal']['signal__gdax_has_usd'] = signal__gdax_has_usd\n audit_js['signal']['signal__gdax_has_eth'] = signal__gdax_has_eth\n audit_js['signal']['signal__cex_has_eth'] = signal__cex_has_eth\n audit_js['signal']['signal__arbitrage_delta'] = signal__arbitrage_delta\n\n audit_js['total_usd__num'] = gdax_account.js['usd__num'] + cex_account.js['usd__num']\n audit_js['total_eth__num'] = gdax_account.js['eth__num'] + cex_account.js['eth__num']\n audit_js['gdax_account'] = gdax_account.js\n audit_js['cex_account'] = cex_account.js\n return audit_js\n\n snap_again = False # Only repeat if we have an gdax buy action\n if signal__gdax_has_usd['signal'] and signal__arbitrage_delta['signal']:\n exec_context = self.exec_gdax_buy(timestamp)\n snap_again = True\n\n # Audit\n audit_js = mk_audit_js()\n audit_js['action'] = exec_context\n audit = AuditTradeModel.build(audit_js)\n logger.info('-----Executed GDAX Buy-----')\n logger.info(audit)\n logger.info('---------------------------')\n audit.db_save(es)\n\n if signal__gdax_has_eth['signal']:\n exec_context = self.exec_eth_transfer()\n\n # Audit\n audit_js = mk_audit_js()\n audit_js['action'] = exec_context\n audit = 
AuditTradeModel.build(audit_js)\n logger.info('-----Executed ETH TRANSFER-----')\n logger.info(audit)\n logger.info('---------------------------')\n audit.db_save(es)\n\n if signal__cex_has_eth['signal']:\n exec_context = self.exec_cex_sell(timestamp)\n\n # Audit\n audit_js = mk_audit_js()\n audit_js['action'] = exec_context\n audit = AuditTradeModel.build(audit_js)\n logger.info('-----Executed CEX Sell-----')\n logger.info(audit)\n logger.info('---------------------------')\n audit.db_save(es)\n\n # Extra logging\n audit_js = mk_audit_js()\n logger.info('post-snapping states: \\n' + json.dumps(audit_js, indent=2))\n\n return snap_again", "def transaction(self):\n return Transaction(self)", "def _AddSnapshot(self, snapshot):\n if self._history.count(snapshot) == 0:\n self._history.append(snapshot)", "def all_transactions(self):\n self._update()\n with self.all_tx_lock:\n all_tx_copy = copy.deepcopy(self._all_transactions)\n return all_tx_copy", "async def test_include_transaction(self):\n self.set_source_parameter(\"transactions_to_include\", [self.API1])\n response = await self.collect(get_request_json_return_value=self.JMETER_JSON)\n self.assert_measurement(response, value=\"123\", entities=[])", "async def update_trade_stats(self):\n\n summary_keys = [base for base in config['min_base_volumes']] + ['global']\n summaries = {\n key: {\n 'open_count': 0,\n 'buys': 0,\n 'rebuys': 0,\n 'sells': 0,\n 'collect_sells': 0,\n 'soft_stop_sells': 0,\n 'total_profit': 0.0,\n 'total_loss': 0.0,\n 'total_fees': 0.0,\n 'balancer_refills': 0,\n 'balancer_remits': 0,\n 'balancer_stop_losses': 0,\n 'balancer_profit': 0.0,\n 'balancer_loss': 0.0,\n 'balancer_fees': 0.0,\n } for key in summary_keys\n }\n\n for pair in self.trades:\n if pair not in self.trade_stats[self.time_prefix]:\n continue\n\n base = pair.split('-', 1)[0]\n open_count = len(self.trades[pair]['open'])\n\n summaries[base]['open_count'] += open_count\n summaries[base]['buys'] += self.trade_stats[self.time_prefix][pair]['buys']\n summaries[base]['rebuys'] += self.trade_stats[self.time_prefix][pair]['rebuys']\n summaries[base]['sells'] += self.trade_stats[self.time_prefix][pair]['sells']\n summaries[base]['collect_sells'] += self.trade_stats[self.time_prefix][pair]['collect_sells']\n summaries[base]['soft_stop_sells'] += self.trade_stats[self.time_prefix][pair]['soft_stop_sells']\n summaries[base]['total_profit'] += self.trade_stats[self.time_prefix][pair]['total_profit']\n summaries[base]['total_loss'] += self.trade_stats[self.time_prefix][pair]['total_loss']\n summaries[base]['total_fees'] += self.trade_stats[self.time_prefix][pair]['total_fees']\n summaries[base]['balancer_refills'] += self.trade_stats[self.time_prefix][pair]['balancer_refills']\n summaries[base]['balancer_remits'] += self.trade_stats[self.time_prefix][pair]['balancer_remits']\n summaries[base]['balancer_profit'] += self.trade_stats[self.time_prefix][pair]['balancer_profit']\n summaries[base]['balancer_loss'] += self.trade_stats[self.time_prefix][pair]['balancer_loss']\n summaries[base]['balancer_fees'] += self.trade_stats[self.time_prefix][pair]['balancer_fees']\n\n summaries['global']['open_count'] += open_count\n summaries['global']['buys'] += self.trade_stats[self.time_prefix][pair]['buys']\n summaries['global']['rebuys'] += self.trade_stats[self.time_prefix][pair]['rebuys']\n summaries['global']['sells'] += self.trade_stats[self.time_prefix][pair]['sells']\n summaries['global']['collect_sells'] += self.trade_stats[self.time_prefix][pair]['collect_sells']\n 
summaries['global']['soft_stop_sells'] += self.trade_stats[self.time_prefix][pair]['soft_stop_sells']\n summaries['global']['total_profit'] += self.trade_stats[self.time_prefix][pair]['total_profit']\n summaries['global']['total_loss'] += self.trade_stats[self.time_prefix][pair]['total_loss']\n summaries['global']['total_fees'] += self.trade_stats[self.time_prefix][pair]['total_fees']\n summaries['global']['balancer_refills'] += self.trade_stats[self.time_prefix][pair]['balancer_refills']\n summaries['global']['balancer_remits'] += self.trade_stats[self.time_prefix][pair]['balancer_remits']\n summaries['global']['balancer_profit'] += self.trade_stats[self.time_prefix][pair]['balancer_profit']\n summaries['global']['balancer_loss'] += self.trade_stats[self.time_prefix][pair]['balancer_loss']\n summaries['global']['balancer_fees'] += self.trade_stats[self.time_prefix][pair]['balancer_fees']\n\n for key in summaries:\n self.trade_stats[self.time_prefix][key]['buys'] = summaries[key]['buys']\n self.trade_stats[self.time_prefix][key]['rebuys'] = summaries[key]['rebuys']\n self.trade_stats[self.time_prefix][key]['sells'] = summaries[key]['sells']\n self.trade_stats[self.time_prefix][key]['collect_sells'] = summaries[key]['collect_sells']\n self.trade_stats[self.time_prefix][key]['soft_stop_sells'] = summaries[key]['soft_stop_sells']\n self.trade_stats[self.time_prefix][key]['total_profit'] = summaries[key]['total_profit']\n self.trade_stats[self.time_prefix][key]['total_loss'] = summaries[key]['total_loss']\n self.trade_stats[self.time_prefix][key]['total_fees'] = summaries[key]['total_fees']\n self.trade_stats[self.time_prefix][key]['balancer_refills'] = summaries[key]['balancer_refills']\n self.trade_stats[self.time_prefix][key]['balancer_remits'] = summaries[key]['balancer_remits']\n self.trade_stats[self.time_prefix][key]['balancer_profit'] = summaries[key]['balancer_profit']\n self.trade_stats[self.time_prefix][key]['balancer_loss'] = summaries[key]['balancer_loss']\n self.trade_stats[self.time_prefix][key]['balancer_fees'] = summaries[key]['balancer_fees']\n\n if summaries[key]['open_count'] > self.trade_stats[self.time_prefix][key]['most_open']:\n self.trade_stats[self.time_prefix][key]['most_open'] = summaries[key]['open_count']\n\n filter_items = [pair for pair in self.trades] + [base for base in config['min_base_volumes']] + ['global']\n self.save_attr('trade_stats', max_depth=2, filter_items=filter_items, filter_keys=[self.time_prefix])", "def merge_quantity(self, session, source_qty):\n qty_cls = source_qty.__class__\n try:\n target_qty = session.query(qty_cls).\\\n filter(and_(qty_cls.atom==self,\n qty_cls.data_source==source_qty.data_source)).one()\n target_qty.quantity = source_qty.quantity\n target_qty.std_dev = source_qty.std_dev\n\n except NoResultFound:\n\n self.quantities.append(source_qty)", "def merge(\n self,\n instance: _O,\n *,\n load: bool = True,\n options: Optional[Sequence[ORMOption]] = None,\n ) -> _O:\n\n if self._warn_on_events:\n self._flush_warning(\"Session.merge()\")\n\n _recursive: Dict[InstanceState[Any], object] = {}\n _resolve_conflict_map: Dict[_IdentityKeyType[Any], object] = {}\n\n if load:\n # flush current contents if we expect to load data\n self._autoflush()\n\n object_mapper(instance) # verify mapped\n autoflush = self.autoflush\n try:\n self.autoflush = False\n return self._merge(\n attributes.instance_state(instance),\n attributes.instance_dict(instance),\n load=load,\n options=options,\n _recursive=_recursive,\n 
_resolve_conflict_map=_resolve_conflict_map,\n )\n finally:\n self.autoflush = autoflush", "def set_cache_data(self) -> None:\n if isinstance(self.tx_storage, TransactionCacheStorage):\n hits = self.tx_storage.stats.get(\"hit\")\n misses = self.tx_storage.stats.get(\"miss\")\n if hits:\n self.transaction_cache_hits = hits\n if misses:\n self.transaction_cache_misses = misses", "def merge(self, session, source_state, source_dict, dest_state,\n dest_dict, load, _recursive):\n\n pass", "def populate_transaction(\n self,\n label: dict,\n txid: int,\n read: str,\n write: str,\n transaction: list,\n action: str,\n ):\n method_that_access_table = transaction[\"stacktrace\"][-1]\n class_name_next = re.sub(\n \"/\", \".\", method_that_access_table[\"method\"].split(\", \")[1][1:]\n )\n method_name_next = (\n method_that_access_table[\"method\"].split(\", \")[2].split(\"(\")[0]\n )\n method_signature = \".\".join([class_name_next, method_name_next])\n\n the_sql_query = transaction[\"sql\"]\n\n for tx_read in read:\n if tx_read.casefold() in the_sql_query.casefold():\n self.populate_transaction_read(\n method_signature,\n txid,\n tx_read.casefold(),\n action,\n the_sql_query.casefold(),\n )\n for tx_write in write:\n if tx_write.casefold() in the_sql_query.casefold():\n self.populate_transaction_write(\n method_signature,\n txid,\n tx_write.casefold(),\n action,\n the_sql_query.casefold(),\n )", "def __enter__(self):\n if self._transaction_count == 0:\n self._db_copy = self.db._read()\n self._transaction_count += 1\n return self", "async def test_include_transaction(self):\n self.set_source_parameter(\"transactions_to_include\", [self.API1])\n response = await self.collect(get_request_json_return_value=self.GATLING_JSON)\n self.assert_measurement(response, value=\"1\", entities=self.expected_entities[:1])", "def __init__(self, index, transactions, timestamp):\n self.index = index \n self.transactions = transactions\n self.timestamp = timestamp \n self.previous_hash = previous_hash", "def update_data():\n etf_prices = get_prices(start=START_DATE, end=END_DATE)\n etf_returns = compute_returns(etf_prices)\n merged_etf_data = etf_prices.merge(etf_returns, right_index=True, left_index=True)\n indicators = compute_indicators(merged_etf_data) # this uses the \"ta\" lib, but it does not need\n # to be imported\n merged_etf_data = merged_etf_data.merge(indicators, right_index=True, left_index=True)\n vix_data = get_vix()\n data = merged_etf_data.merge(vix_data, right_index=True, left_index=True)\n data.to_csv('Data/database.csv')\n return", "def write(self, session: Session = None):\n session.merge(self)", "def merge_logs(self):\n ourlog = LogData()\n for l in self.data_set:\n ourlog.entries = ourlog.entries + l.entries\n ourlog.sort_time()\n self.finalized_data = ourlog", "def getrawtransaction(self, txid, verbose=True):\n if verbose:\n return TransactionInfo(**self.proxy.getrawtransaction(txid, 1))\n return self.proxy.getrawtransaction(txid, 0)", "def begin(self):\n if self.tx_id is None:\n self.tx_id = yield self.conn.query('*begin-tx*')\n yield self.conn.query('START TRANSACTION WITH CONSISTENT SNAPSHOT', tx_id=self.tx_id)", "def save_transactions(self, new_transactions: Sequence[ShareTransaction]):\n new_transactions = pd.DataFrame(new_transactions)\n self._transactions_dataframe = self._transactions_dataframe.append(new_transactions)\n\n self._dataframe_io.save_dataframe(self._transactions_dataframe)", "def transaction(self, transaction=None):\n if self._transaction is None:\n if transaction:\n 
self._transaction = transaction\n return self._transaction\n else:\n raise TransactionNotStartedError(\"Transaction not yet started!\")\n else:\n if transaction and transaction != self._transaction:\n raise TransactionAlreadyStartedError(\"Transaction already started, cannot set!\")\n return self._transaction", "def restore(self, oid, serial, data, version, prev_txn, transaction):\n assert not version\n self._check_trans(transaction, 'restore')\n self._async('restorea', oid, serial, data, prev_txn, id(transaction))", "def take_snapshot(self):\r\n self.snapshot = self.blockA, self.blockB, self.blockA_locked, self.blockB_locked, self.blockA_free, \\\r\n self.blockB_free, copy.copy(self.blockA_cells), copy.copy(self.blockB_cells), self.cut", "def to_json(self):\n\n result = super(Snapshot, self).to_json()\n result.update({\n 'snapshot': self.snapshot.to_json(),\n })\n return result", "def add_transaction(self, tx_json):\n recv_tx = Transaction.from_json(tx_json)\n if not recv_tx.verify():\n raise Exception(\"New transaction failed signature verification.\")\n with self.all_tx_lock:\n if tx_json in self._all_transactions:\n print(f\"{self.name} - Transaction already exist in pool.\")\n return\n self._all_transactions.add(tx_json)", "def _get_transaction(self, hash_bytes: bytes) -> BaseTransaction:\n raise NotImplementedError", "def mergeAggregatedCsvData(self, contexts, obj, aggData1, aggData2):\n return aggData1 + aggData2", "def get_snapshot(self, name=None, snapshot_id=None):\n if snapshot_id:\n return self._search_snapshot(key=\"snapshot_id\", value=snapshot_id)\n elif name:\n return self._search_snapshot(key=\"name\", value=name)\n else:\n raise ValueError(\"name or snapshot_id must be provided\")", "def op_transfer(cls, op, tx_idx, num, date):\n result = cls._validated(op, tx_idx, num, date)\n if not result:\n return\n\n record, author_id, permlink = result\n\n # add payment record and return post id\n sql = \\\n\"\"\"\nINSERT INTO hive_payments(block_num, tx_idx, post_id, from_account, to_account, amount, token) SELECT\n bn, tx, hp.id, fa, ta, am, tkn\nFROM\n( \n SELECT bn, tx, hpd.id, auth_id, fa, ta, am, tkn\n FROM (VALUES (:_block_num, :_tx_idx, :_permlink, :_author_id , :_from_account , :_to_account , :_amount, :_token)) \n AS v(bn, tx, perm, auth_id, fa, ta, am, tkn) \n JOIN hive_permlink_data hpd\n ON v.perm = hpd.permlink\n) as vv(bn, tx, hpd_id, auth_id, fa, ta, am, tkn )\nJOIN hive_posts hp\nON hp.author_id=vv.auth_id AND hp.permlink_id=vv.hpd_id\nRETURNING post_id\n\"\"\"\n\n post_id = DB.query_one(sql, \n _block_num=record['block_num'], \n _tx_idx=record['tx_idx'], \n _permlink=permlink, \n _author_id=author_id,\n _from_account=record['from_account'],\n _to_account=record['to_account'],\n _amount=record['amount'],\n _token=record['token']\n )\n\n amount = record['amount']\n if not isinstance(amount, float):\n amount = float(amount)\n\n if amount != 0.0 and post_id is not None:\n # update post record\n sql = \"UPDATE hive_posts SET promoted = promoted + :val WHERE id = :id\"\n DB.query(sql, val=amount, id=post_id)", "def transaction_data(self):\n return list(map(lambda transaction:transaction.to_json(), self.transaction_map.values()))", "def test_begin_ro_transaction_snapshot(self):\n\n instruction = Instruction(\"BeginRO(T1)\")\n self.transaction_manager.execute(instruction)\n for site in self.transaction_manager.sites.values():\n self.assertTrue(len(site.data_manager.variables) > 0)\n for variable_identifier in site.data_manager.variables:\n 
self.assertTrue(variable_identifier in self.transaction_manager.readonly_snapshots[\"T1\"])", "def gettransaction(self, txid):\n return TransactionInfo(**self.proxy.gettransaction(txid))", "def raw_get_transaction(cls, txid):\n r = requests.get(cls.MAIN_TX_API.format(txid), timeout=DEFAULT_TIMEOUT)\n r.raise_for_status() # pragma: no cover\n return r.json()", "def snapshot(self):\n snapshot = super(VirtualMachineDAO, self).snapshot()\n for entry in snapshot:\n vm = entry.get(VirtualMachineDAO.INNER_OBJ)\n vm['network'] = VMNetworkDAO(self.session, vm.get(VirtualMachineDAO.FOREIGN_KEY)).snapshot()\n return snapshot", "def take_snapshot(self):\r\n self.snapshot = self.name, self.size, copy.copy(self.cells)\r\n self.bucket_array.take_snapshot()", "def deserialize(cls, raw_transaction: bytes) -> Transaction:\n return cls.from_solders(SoldersTx.from_bytes(raw_transaction))", "def take_snapshot(self):\r\n self.snapshot = self.max_gain, self.__dup_array(), copy.copy(self.free_cell_list)", "def bundle(self):\n if len(self.data) > 0:\n self.journal = self.data[0].append(self.data[1:])\n self.journal = self.journal[self.journal['Type'] == 'Current Title']\n self.journal = self.journal[self.journal['Rights'] != 'Mineral']\n self.journal = self.journal[~self.journal.index.duplicated(keep='first')]\n self.journal = self.journal.sort_values(by=['Registration Date'], ascending=False)\n return self.journal", "def load_transactions(self, address, update=True, verbose=False, **kwargs):\n if self.apikey is None:\n update = False\n if verbose:\n print('load_transactions', address)\n fn = os.path.join(self.cache_dir, address + '.json')\n startblock = None\n transactions = []\n if os.path.exists(fn):\n with open(fn) as f:\n try:\n transactions = json.load(f)\n except json.decoder.JSONDecodeError:\n if verbose:\n print('ignoring error while loading', fn)\n pass\n if not update:\n return transactions\n if len(transactions):\n startblock = max([int(e['blockNumber']) for e in transactions])\n if verbose:\n print('starting from cache at', startblock, 'with', len(transactions))\n # add new transactions\n new_transactions = self.fetch_transactions(address, startblock=startblock, verbose=verbose, **kwargs)\n # dedupe\n if len(new_transactions) > 0:\n transactions.extend(new_transactions)\n transactions = list({e['hash']:e for e in transactions}.values())\n safe_dump(fn, transactions)\n return transactions", "def snapshot(self,):\n self.stack.append((self.pos,self.dataptr))", "def get_snapshot_object(session, key, snapshot=None):\n # type: (Session, Text, Optional[Text]) -> Any\n url_tail = \"/{}/{}/{}/{}/{}\".format(\n CoordConstsV2.RSC_NETWORKS,\n session.network,\n CoordConstsV2.RSC_SNAPSHOTS,\n session.get_snapshot(snapshot),\n CoordConstsV2.RSC_OBJECTS,\n )\n return _get_stream(session, url_tail, {CoordConstsV2.QP_KEY: key})", "def create_snapshot(store, dataset, snapshot, description_fields, snapshot_changes):\n validate_snapshot_name(store, dataset, snapshot)\n validate_datalad_config(store, dataset)\n update_description(store, dataset, description_fields)\n update_changes(store, dataset, snapshot, snapshot_changes)\n save_snapshot(store, dataset, snapshot)\n return get_snapshot(store, dataset, snapshot)", "def snapshot(self):\n return self.journal.create_checkpoint()", "def transactions(self):\r\n return tx.Transactions(self)", "def get_usage_data(self):\n with self._lock:\n data_copy = self._data.copy()\n return data_copy", "def get_total_data():\n return pd.merge(compute_aggregate_load_data(), 
compute_aggregate_weather_data(),on=\"Date\")", "def test_total_values_for_two_separate_transactions():\n ph = PositionHandler()\n\n # Asset 1\n asset1 = 'EQ:AMZN'\n dt1 = pd.Timestamp('2015-05-06 15:00:00', tz=pytz.UTC)\n trans_pos_1 = Transaction(\n asset1,\n quantity=75,\n dt=dt1,\n price=483.45,\n order_id=1,\n commission=15.97\n )\n ph.transact_position(trans_pos_1)\n\n # Asset 2\n asset2 = 'EQ:MSFT'\n dt2 = pd.Timestamp('2015-05-07 15:00:00', tz=pytz.UTC)\n trans_pos_2 = Transaction(\n asset2,\n quantity=250,\n dt=dt2,\n price=142.58,\n order_id=2,\n commission=8.35\n )\n ph.transact_position(trans_pos_2)\n\n # Check all total values\n assert ph.total_market_value() == 71903.75\n assert np.isclose(ph.total_unrealised_pnl(), -24.31999999999971)\n assert ph.total_realised_pnl() == 0.0\n assert np.isclose(ph.total_pnl(), -24.31999999999971)", "def aggregate(self):\n data_to_track = {}\n for possession in self.possessions_to_track_aggregate:\n data_to_track[possession] = self._haves[possession]\n\n for variable in self.variables_to_track_aggregate:\n try:\n data_to_track[variable] = self.__dict__[variable]\n except KeyError:\n pass\n self.database_connection.put([\"aggregate\",\n data_to_track,\n self.group,\n self.round])", "def Unit_merge(*args):\n return _libsbml.Unit_merge(*args)", "def transaction(self, uuid):\r\n return tx.Transaction(self, uuid)", "def sign_tx(self, tx):\n if self.privkey:\n log.info('signing tx', tx=tx, account=self)\n tx.sign(self.privkey)\n else:\n raise ValueError('Locked account cannot sign tx')", "def transaction(self, context: InjectionContext = None) -> \"ProfileSession\":", "def txn_data(df, txns):\n return df[df.transaction_id.isin(txns)].copy()", "def cache_raw_txs(self, cli_txs): \n # Get list of all tx ids\n txids = list(dict.fromkeys(cli_txs.keys()))\n tx_count = len(txids)\n\n # If there are new transactions (if the transations count changed)\n if tx_count != self.cache[\"tx_count\"]:\n for txid in txids:\n # Cache each tx, if not already cached.\n # Data is immutable (unless reorg occurs) and can be saved in a file for permanent caching\n if txid not in self.cache[\"raw_transactions\"]:\n # Call Bitcoin Core to get the \"raw\" transaction - allows to read detailed inputs and outputs\n raw_tx_hex = self.cli.gettransaction(txid)[\"hex\"]\n raw_tx = self.cli.decoderawtransaction(raw_tx_hex)\n # Some data (like fee and category, and when unconfirmed also time) available from the `listtransactions`\n # command is not available in the `getrawtransacion` - so add it \"manually\" here.\n if \"fee\" in cli_txs[txid]:\n raw_tx[\"fee\"] = cli_txs[txid][\"fee\"]\n if \"category\" in cli_txs[txid]:\n raw_tx[\"category\"] = cli_txs[txid][\"category\"]\n if \"time\" in cli_txs[txid]:\n raw_tx[\"time\"] = cli_txs[txid][\"time\"]\n\n if \"blockhash\" in cli_txs[txid]:\n raw_tx[\"block_height\"] = self.cli.getblockheader(cli_txs[txid][\"blockhash\"])[\"height\"]\n else:\n raw_tx[\"block_height\"] = -1\n\n # Loop on the transaction's inputs\n # If not a coinbase transaction:\n # Get the the output data corresponding to the input (that is: input_txid[output_index])\n tx_ins = []\n for vin in raw_tx[\"vin\"]:\n # If the tx is a coinbase tx - set `coinbase` to True\n if \"coinbase\" in vin:\n raw_tx[\"coinbase\"] = True\n break\n # If the tx is a coinbase tx - set `coinbase` to True\n vin_txid = vin[\"txid\"]\n vin_vout = vin[\"vout\"]\n try:\n raw_tx_hex = self.cli.gettransaction(vin_txid)[\"hex\"]\n tx_in = 
self.cli.decoderawtransaction(raw_tx_hex)[\"vout\"][vin_vout]\n tx_in[\"txid\"] = vin[\"txid\"]\n tx_ins.append(tx_in)\n except:\n pass\n # For each output in the tx_ins list (the tx inputs in their output \"format\")\n # Create object with the address, amount, and whatever the address belongs to the wallet (`internal=True` if it is).\n raw_tx[\"from\"] = [{\n \"address\": out[\"scriptPubKey\"][\"addresses\"][0],\n \"amount\": out[\"value\"],\n \"internal\": out[\"scriptPubKey\"][\"addresses\"][0] in self.wallet_addresses\n } for out in tx_ins]\n # For each output in the tx (`vout`)\n # Create object with the address, amount, and whatever the address belongs to the wallet (`internal=True` if it is).\n raw_tx[\"to\"] = [({\n \"address\": out[\"scriptPubKey\"][\"addresses\"][0],\n \"amount\": out[\"value\"],\n \"internal\": out[\"scriptPubKey\"][\"addresses\"][0] in self.wallet_addresses\n }) for out in raw_tx[\"vout\"] if \"addresses\" in out[\"scriptPubKey\"]]\n # Save the raw_transaction to the cache\n cache[self.walletname][\"raw_transactions\"][txid] = raw_tx\n # Set the tx count to avoid unnecessary indexing\n cache[self.walletname][\"tx_count\"] = tx_count\n # Set the tx changed to indicate the there are new transactions to cache\n cache[self.walletname][\"tx_changed\"] = True\n else:\n # Set the tx changed to False to avoid unnecessary indexing\n cache[self.walletname][\"tx_changed\"] = False\n\n # If unconfirmed transactions were mined, assign them their block height\n blocks = self.cli.getblockcount()\n if blocks != self.cache[\"last_block\"]:\n for txid in self.cache[\"raw_transactions\"]:\n if self.cache[\"raw_transactions\"][txid][\"block_height\"] == -1 and \"blockhash\" in cli_txs[txid]:\n height = self.cli.getblockheader(cli_txs[txid][\"blockhash\"])[\"height\"]\n cache[self.walletname][\"raw_transactions\"][txid][\"block_height\"] = height\n cache[self.walletname][\"raw_tx_block_update\"][txid] = height\n cache[self.walletname][\"last_block\"] = blocks\n\n return self.cache[\"raw_transactions\"]", "def gettxout(self, txid, index, mempool=True):\n tx = self.proxy.gettxout(txid, index, mempool)\n if tx != None:\n return TransactionInfo(**tx)\n else:\n return TransactionInfo()", "def add_transaction(self, block, transaction):\n cmd = \"\"\"INSERT INTO %s(%s, %s, %s, %s, %s, %s)\n VALUES(?,?,?,?,?,?);\"\"\" %(TABLE_TRANSACTIONS,\n COL_TRANSACTION_BLOCK,\n COL_TRANSACTION_SENDER,\n COL_TRANSACTION_RECEIVER,\n COL_TRANSACTION_AMOUNT,\n COL_TRANSACTION_SUB_TIME,\n COL_TRANSACTION_VER_TIME)\n self.__dbcursor.execute(cmd, (block, transaction.sender,\n transaction.receiver,\n transaction.amount,\n transaction.submitted_time,\n transaction.verified_time))" ]
[ "0.5806089", "0.50894815", "0.5036111", "0.50232536", "0.49642876", "0.4911518", "0.487619", "0.48526537", "0.48363346", "0.48321915", "0.47403112", "0.47268453", "0.46747193", "0.4672916", "0.46666792", "0.4641363", "0.4636667", "0.46207553", "0.46078375", "0.46022642", "0.4585646", "0.45775586", "0.45727476", "0.456886", "0.45646688", "0.4561287", "0.45502204", "0.4544669", "0.4539312", "0.4538765", "0.45370367", "0.45339113", "0.45322245", "0.4520294", "0.4515589", "0.45152098", "0.45041603", "0.44997835", "0.449748", "0.44964138", "0.44916368", "0.4482156", "0.4481429", "0.44807905", "0.4478595", "0.4474729", "0.4470679", "0.44680372", "0.44580165", "0.44561625", "0.44516245", "0.44469973", "0.4444978", "0.4432881", "0.44318268", "0.442862", "0.4426267", "0.4424733", "0.44163692", "0.441272", "0.4399066", "0.43928733", "0.43783924", "0.43728644", "0.43625012", "0.43624353", "0.43621224", "0.43603572", "0.43585414", "0.43567804", "0.43520996", "0.43514484", "0.4350569", "0.43415877", "0.4320113", "0.43175238", "0.43162605", "0.43158317", "0.43150857", "0.43142375", "0.4306963", "0.42993632", "0.42966196", "0.42938808", "0.4289861", "0.42887494", "0.4267962", "0.42630804", "0.42599922", "0.4257649", "0.42573604", "0.4254237", "0.4246439", "0.42413345", "0.42374438", "0.42366633", "0.42301133", "0.42241767", "0.42204762", "0.4216119" ]
document_score: 0.6420531
document_rank: 0
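The metadata's objective block marks each row for triplet training over (query, document, negatives). A minimal sketch of consuming the rows that way, assuming the sentence-transformers library and the ds object from the loading example; the model choice and batch size are arbitrary:

from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses

model = SentenceTransformer("all-MiniLM-L6-v2")

# Expand each row into one (anchor, positive, negative) triple per negative.
examples = [
    InputExample(texts=[row["query"], row["document"], neg])
    for row in ds
    for neg in row["negatives"]
]

loader = DataLoader(examples, shuffle=True, batch_size=16)
model.fit(train_objectives=[(loader, losses.TripletLoss(model=model))], epochs=1)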
Performs a "rollback" merge after a failed harvest. Snapshot is a copy of the main StatsEngine data that we attempted to harvest, but failed. Not all types of data get merged during a rollback.
def rollback(self, snapshot):
    if not self.__settings:
        return

    _logger.debug('Performing rollback of data into '
                  'subsequent harvest period. Metric data and transaction '
                  'events will be preserved and rolled into next harvest')

    self.merge_metric_stats(snapshot)
    self._merge_transaction_events(snapshot, rollback=True)
    self._merge_synthetics_events(snapshot, rollback=True)
    self._merge_error_events(snapshot)
    self._merge_custom_events(snapshot, rollback=True)
    self._merge_span_events(snapshot, rollback=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rollback(self, stage, enodes, exception):", "def test_backup_merge_with_restore(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self._take_n_backups(n=2)\n self.backupset.start = 1\n self.backupset.end = 2\n output, error = self.backup_restore()\n if error:\n self.fail(\"Restoring backup failed: {0}\".format(error))\n self.log.info(\"Finished restoring backup before merging\")\n status, output, message = self.backup_merge()\n if not status:\n self.fail(message)\n self.backupset.start = 1\n self.backupset.end = 1\n rest = RestConnection(self.backupset.restore_cluster_host)\n rest.flush_bucket()\n output, error = self.backup_restore()\n if error:\n self.fail(\"Restoring backup failed\")\n self.log.info(\"Finished restoring backup after merging\")", "def rollback(self):\n pass", "def rollback(self):\n raise NotImplementedError", "def test_backup_merge_with_unmerged(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self._take_n_backups(n=2)\n self.backupset.start = 1\n self.backupset.end = 2\n self.log.info(\"Merging existing incremental backups\")\n status, output, message = self.backup_merge()\n if not status:\n self.fail(message)\n self.log.info(\"Taking more backups\")\n self._take_n_backups(n=2)\n self.backupset.start = 1\n self.backupset.end = 3\n self.log.info(\"Merging new backups into already merged backup\")\n status, output, message = self.backup_merge()\n if not status:\n self.fail(message)\n self.log.info(\"Successfully merged new backups with already merged backup\")", "def test_fail_transaction(self):\n source_wallet = self.source_user.wallets.last()\n target_wallet = self.target_user.wallets.last()\n\n source_balance_init = source_wallet.balance\n target_balance_init = target_wallet.balance\n\n data = {\n 'initial_amount': 1100,\n 'source_wallet': source_wallet,\n 'target_wallet': target_wallet,\n }\n execute_wallet_transaction(data)\n\n source_wallet.refresh_from_db()\n target_wallet.refresh_from_db()\n\n self.assertTrue(source_balance_init == source_wallet.balance)\n self.assertTrue(target_balance_init == target_wallet.balance)\n\n self.assertEqual(source_wallet.outcome_transactions.last().status, TRANSACTION_FAIL_STATUS)", "def rollback(self, context: 'IconScoreContext', _block_height: int, _block_hash: bytes):\n Logger.info(tag=ROLLBACK_LOG_TAG, msg=\"rollback() start\")\n\n self.prep_address_converter: 'PRepAddressConverter' = context.storage.meta.get_prep_address_converter(context)\n\n self.preps = self._load_preps(context)\n self.term = self._load_term(context)\n\n Logger.info(tag=ROLLBACK_LOG_TAG, msg=f\"rollback() end: {self.term}\")", "def roll_back_demo():\n # return harvey rupp to belmont hill team\n bh = Team.query.get(161)\n print(f'retrieved {bh}')\n hr = Runner.query.get(1700)\n print(f'retrieved {hr}')\n if bh not in hr.teams:\n bh.runners.append(hr)\n db.session.commit()\n\n # set primary_key values below which will be untouched\n first_deleted_race = 19\n first_deleted_runner = 3712\n first_deleted_result = 4750\n first_deleted_school = 68\n first_deleted_team = 315\n first_deleted_location = 8\n first_deleted_course = 9\n first_deleted_league = 4\n\n # do not allow unless user is administrator\n if not current_user.is_administrator():\n return redirect(url_for('races.results', 
race_id=race.id))\n\n # delete races and associated results for races in delete range\n races = Race.query.all()\n for race in races:\n if race.id >= first_deleted_race:\n delete_race_by_id(race.id)\n\n # disassociate runners from teams and delete\n teams = Team.query.all()\n for team in teams:\n if team.id >= first_deleted_team:\n team.runners.clear()\n db.session.commit()\n\n runners = Runner.query.all()\n for runner in runners:\n if runner.id >= first_deleted_runner:\n db.session.delete(runner)\n db.session.commit()\n\n # delete teams\n for team in teams:\n if team.id >= first_deleted_team:\n db.session.delete(team)\n db.session.commit()\n\n # delete courses\n courses = Course.query.all()\n for course in courses:\n if course.id >= first_deleted_course:\n db.session.delete(course)\n db.session.commit()\n\n # disassociate locaions from schools and delete\n schools = School.query.all()\n for school in schools:\n if school.id >= first_deleted_school:\n school.locations.clear()\n db.session.commit()\n\n locations = Location.query.all()\n for location in locations:\n if location.id >= first_deleted_location:\n db.session.delete(location)\n db.session.commit()\n\n # disassociate schools from leagues and delete\n leagues = League.query.all()\n for league in leagues:\n if league.id >= first_deleted_league:\n league.schools.clear()\n db.session.commit()\n\n for school in schools:\n if school.id >= first_deleted_school:\n db.session.delete(school)\n db.session.commit()\n\n # delete leagues\n for league in leagues:\n if league.id >= first_deleted_league:\n db.session.delete(league)\n db.session.commit()\n\n # recalculate all runners seed times\n async_update_all_seed_times.delay()\n\n # update league standings via background task\n for league_id in [1, 2]:\n async_update_league_standings.delay(league_id=league_id)\n return redirect(url_for('core.index'))", "def rollback(self):\n # PEP 249\n raise impala.error.NotSupportedError()", "def rollback(self):\n self._rollback = True", "def rollback(self):\n raise TransactionRollback('rollback called outside of transaction')", "def test_merge_backup_with_failover_logs(self):\n self.log.info(\"Load 1st batch docs\")\n create_gen1 = BlobGenerator(\"ent-backup1\", \"ent-backup-\", self.value_size,\n end=self.num_items)\n self._load_all_buckets(self.master, create_gen1, \"create\", 0)\n failed_persisted_bucket = []\n rest = RestConnection(self.master)\n cluster_nodes = rest.get_nodes()\n for bucket in self.buckets:\n ready = RebalanceHelper.wait_for_stats_on_all(self.backupset.cluster_host,\n bucket.name, 'ep_queue_size',\n 0, timeout_in_seconds=120)\n if not ready:\n failed_persisted_bucket.append(bucket.name)\n if failed_persisted_bucket:\n self.fail(\"Buckets %s did not persisted.\" % failed_persisted_bucket)\n self.log.info(\"Stop persistence at each node\")\n clusters = copy.deepcopy(cluster_nodes)\n shell = RemoteMachineShellConnection(self.backupset.backup_host)\n for bucket in self.buckets:\n for node in clusters:\n shell.execute_command(\"%scbepctl%s %s:11210 -b %s stop\" % \\\n (self.cli_command_location,\n self.cmd_ext,\n node.ip,\n bucket.name))\n shell.disconnect()\n self.log.info(\"Load 2nd batch docs\")\n create_gen2 = BlobGenerator(\"ent-backup2\", \"ent-backup-\", self.value_size,\n end=self.num_items)\n self._load_all_buckets(self.master, create_gen2, \"create\", 0)\n self.sleep(5)\n self.log.info(\"Crash cluster via kill memcached\")\n for node in clusters:\n for server in self.servers:\n if node.ip == server.ip:\n num_entries = 4\n 
reach_num_entries = False\n while not reach_num_entries:\n shell = RemoteMachineShellConnection(server)\n shell.kill_memcached()\n ready = False\n while not ready:\n if not RestHelper(RestConnection(server)).is_ns_server_running():\n self.sleep(10)\n else:\n ready = True\n cmd = \"%scbstats%s %s:11210 failovers -u %s -p %s | grep num_entries \" \\\n \"| gawk%s '{printf $2}' | grep -m 5 '4\\|5\\|6\\|7'\" \\\n % (self.cli_command_location, self.cmd_ext, server.ip,\n \"cbadminbucket\", \"password\", self.cmd_ext)\n output, error = shell.execute_command(cmd)\n shell.disconnect()\n if output:\n self.log.info(\"number failover logs entries reached. %s \" % output)\n reach_num_entries = True\n self.backup_create()\n self.log.info(\"Start backup data\")\n self.backup_cluster()\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n self.log.info(\"Load 3rd batch docs\")\n create_gen3 = BlobGenerator(\"ent-backup3\", \"ent-backup-\", self.value_size,\n end=self.num_items)\n self._load_all_buckets(self.master, create_gen3, \"create\", 0)\n self.backup_cluster()\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)", "def rollback(self):\n try:\n if self._cur_batch:\n self._cur_batch.rollback()\n except ValueError:\n # ignore \"Batch must be in progress to rollback\" error\n pass\n self._cur_batch = None\n self._num_mutations = 0", "def rollback(self) -> None:\n with self.lock:\n self.wait(self._rollback_gen())", "def rollback(commit_id):\n _confirm_branch()\n \n require('settings', provided_by=[production, staging])\n require('branch', provided_by=[stable, master, branch])\n \n maintenance_up()\n checkout_latest()\n git_reset(commit_id)\n gzip_assets()\n deploy_to_s3()\n maintenance_down()", "def _rollback_context(self, persister):\n try:\n # Rollback the job transactional context.\n persister.rollback()\n\n except _errors.DatabaseError as error:\n _LOGGER.error(\n \"Error in %s rolling back job's context.\",\n self.__action.__name__, exc_info=error\n )\n\n # Update the job status.\n self.__result = False\n message = \"Tried to execute action ({0}).\".format(\n self.__action.__name__)\n self._add_status(Job.ERROR, Job.COMPLETE, message, True)\n\n # Finish context which means mark the job as finished\n # and update procedure's information.\n self._finish_context(False)", "def _do_rollback(self):\n self.backend.rollback()", "def rollback(self, schema: ArchiveSchema, writer: ArchiveFileWriter, version: int):\n # Get an updated shapshot listing.\n snapshots = self.snapshots.rollback(version=version)\n # Materialize the modified archive.\n self._write(schema=schema, writer=writer, snapshots=snapshots)\n # Update the cached objects\n self.schema = schema\n self.snapshots = snapshots", "def rollback(self, exc):\n USER.info('%s: Rolling Back Failed Build', self.recipe.name)\n cascade = False\n if isinstance(exc, AssertionError):\n logging.error('Error during verify() of %s', self.recipe.name)\n cascade = True\n if cascade or isinstance(exc, PakitLinkError):\n if not cascade:\n logging.error('Error during linking of %s', self.recipe.name)\n walk_and_unlink(self.recipe.install_dir, self.recipe.link_dir)\n cascade = True\n if cascade or (not isinstance(exc, PakitLinkError) and\n not isinstance(exc, AssertionError)):\n if not cascade:\n logging.error('Error during build() of %s', self.recipe.name)\n try:\n Command('rm -rf ' + self.recipe.install_dir).wait()\n except PakitCmdError: # pragma: no cover\n pass", "def 
rollbackSnapshotLXCContainer(self,node,vmid,snapname):\n post_data = {}\n data = self.connect('post','nodes/%s/lxc/%s/snapshot/%s/rollback' % (node,vmid,snapname), post_data) \n return data", "def revert(self, snapshot):\n state_root, checkpoint_id = snapshot\n\n with self.state_db() as state_db:\n # first revert the database state root.\n state_db.root_hash = state_root\n # now roll the underlying database back\n\n self.chaindb.revert(checkpoint_id)", "def rollback(self):\n self._connection.execute_nonquery(\"sql\", \"ROLLBACK\", True)", "def rollbackVirtualMachine(self,node,vmid,snapname):\n post_data = None\n data = self.connect('post',\"nodes/%s/qemu/%s/snapshot/%s/rollback\" % (node,vmid,snapname), post_data)\n return data", "def rollback(self):\r\n self.db.rollback()", "def test_merge_backup_with_merge_kill_and_re_merge(self):\n gen = BlobGenerator(\"ent-backup1\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self._take_n_backups(n=self.backupset.number_of_backups)\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n self.log.info(\"Start to merge backup\")\n self.backupset.start = randrange(1, self.backupset.number_of_backups)\n self.backupset.end = 2\n\n self.merged = True\n merge_threads = []\n merge_thread = Thread(target=self.backup_merge)\n merge_threads.append(merge_thread)\n merge_thread.start()\n merge_kill_thread = Thread(target=self._kill_cbbackupmgr)\n merge_threads.append(merge_kill_thread)\n merge_kill_thread.start()\n for merge_thread in merge_threads:\n merge_thread.join()\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n result, output, _ = self.backup_merge()\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)", "def rollback(self):\n self.success = False\n self.close()", "def test_revision_diff_delete_then_rollback(self):\n payload = base.DocumentFixture.get_minimal_fixture()\n bucket_name = test_utils.rand_name('bucket')\n created_documents = self.create_documents(bucket_name, payload)\n revision_id = created_documents[0]['revision_id']\n\n # Delete all previously created documents.\n deleted_documents = self.create_documents(bucket_name, [])\n comparison_revision_id = deleted_documents[0]['revision_id']\n\n # Validate that the empty bucket is deleted.\n self._verify_buckets_status(\n revision_id, comparison_revision_id, {bucket_name: 'deleted'})\n\n # Rollback to first non-empty revision.\n rollback_revision_id = self.rollback_revision(revision_id)['id']\n # Validate that diffing rolled-back revision against 1 is unmodified.\n self._verify_buckets_status(\n revision_id, rollback_revision_id, {bucket_name: 'unmodified'})\n\n # Validate that diffing rolled-back revision against 2 is created\n # (because the rolled-back revision is newer than revision 2).\n self._verify_buckets_status(\n comparison_revision_id, rollback_revision_id,\n {bucket_name: 'created'})", "def _rollback_to_last_consistent_state(self):\n\n with recording_failure_handler():\n need_unfinished_action_rollback = not self._action_recorder.is_empty() and not self._action_recorder.last_action_is_finished()\n\n if need_unfinished_action_rollback:\n\n with recording_failure_handler():\n (name, args, kwargs) = self._action_recorder.get_unfinished_action()\n action = self._action_registry.get_action(name)\n\n # we try to rollback the unfinished action\n action.rollback_action(args=args, kwargs=kwargs, 
was_interrupted=True)\n\n with recording_failure_handler():\n self._action_recorder.rollback_unfinished_action()\n\n return True\n\n return False", "def rollback(self):\n self.db.rollback()", "def rollback(self, target_revision_id):\n url = DeckhandClient.get_path(\n DeckhandPaths.ROLLBACK\n ).format(target_revision_id)\n\n response = self._post_request(url)\n self._handle_bad_response(response)", "def undo(self):\n if self._snapshot_index >= 0:\n snapshot = self._snapshots[self._snapshot_index]\n for chunk_location in snapshot:\n dimension, cx, cz = chunk_location\n chunk = self._unserialise_chunk(dimension, cx, cz, -1)\n self._chunk_cache[chunk_location] = chunk\n self._snapshot_index -= 1", "def rollback(self, mapset=None):\n if mapset is None:\n mapset = self.current_mapset", "def test_fork_snapshot_bad_restore(network, example_snapshot):\n fail_name = uuid.uuid4().hex\n node = \"as2border1\"\n\n bf_set_network(network)\n # Should fail when trying to restore an item that was never deactivated\n # in base snapshot\n with pytest.raises(HTTPError):\n bf_fork_snapshot(\n base_name=example_snapshot, name=fail_name, restore_nodes=[node]\n )", "def rollback(self):\n self.conn.rollback()", "def test_rollback():", "def abort_merge():\n common.safe_git_call('merge --abort')", "def test_merge_backup_with_partial_backup(self):\n gen = BlobGenerator(\"ent-backup1\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self._take_n_backups(n=self.backupset.number_of_backups)\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n backup_threads = []\n backup_thread = Thread(target=self.backup_cluster)\n backup_threads.append(backup_thread)\n backup_thread.start()\n backup_kill_thread = Thread(target=self._kill_cbbackupmgr)\n backup_threads.append(backup_kill_thread)\n backup_kill_thread.start()\n for backup_thread in backup_threads:\n backup_thread.join()\n self.backupset.number_of_backups += 1\n self.log.info(\"Start to merge backup\")\n self.backupset.start = randrange(1, self.backupset.number_of_backups)\n self.backupset.end = 3\n self.merged = True\n status, output, error = self.backup_merge()\n if status:\n self.fail(\"This merge should fail due to last backup killed, not complete yet\")\n elif \"Merging backup failed\" in error:\n self.log.info(\"Test failed as expected as last backup failed to complete\")\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)", "def test_merge_backup_from_old_and_new_bucket(self):\n gen = BlobGenerator(\"ent-backup1_\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.log.info(\"Start doing backup\")\n self.backup_create()\n self.backup_cluster()\n if self.bucket_delete:\n self.log.info(\"Start to delete bucket\")\n BucketOperationHelper.delete_all_buckets_or_assert([self.master], self)\n BucketOperationHelper.create_bucket(serverInfo=self.master, test_case=self)\n elif self.bucket_flush:\n self.log.info(\"Start to flush bucket\")\n self._all_buckets_flush()\n gen = BlobGenerator(\"ent-backup2_\", \"ent-backup-\", self.value_size, end=self.num_items)\n self.log.info(\"Start to load bucket again with different key\")\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_cluster()\n self.backupset.number_of_backups += 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n self.log.info(\"Start to 
merge backup\")\n self.backupset.start = randrange(1, self.backupset.number_of_backups)\n self.backupset.end = self.backupset.number_of_backups\n self.merged = True\n result, output, _ = self.backup_merge()\n self.backupset.end -= 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n current_vseqno = self.get_vbucket_seqnos(self.cluster_to_backup, self.buckets,\n self.skip_consistency, self.per_node)\n self.log.info(\"*** Start to validate data in merge backup \")\n self.validate_backup_data(self.backupset.backup_host, [self.master],\n \"ent-backup\", False, False, \"memory\",\n self.num_items, \"ent-backup1\")\n self.backup_cluster_validate(skip_backup=True)", "def rollback(self):\n for db in self.values():\n db.rollback()", "async def rollback(self):\n if not self._active:\n raise ProfileSessionInactiveError()\n await self._teardown(commit=False)\n self._active = False", "def RollBack(self):\r\n self.conn.rollback()", "def _recover(self,):\n modlogger.debug( \"starting recovery\")\n with self.id_lock: #Prevent new ops being created.\n logs = [ LogFile(x,readonly=True) for x in self._findlogs() ]\n logiter = [ iter(x) for x in logs ]\n ops = [ _getop(x) for x in logiter ]\n opids = [ _getid(x) for x in ops ]\n #order the log files by operation Id.\n data = sorted(zip(logs,logiter,ops,opids),key =lambda x:x[3])\n modlogger.debug( \"SR:%s\"%data)\n #And now got through all log files in Id order\n state = 'init'\n unrecoverable = []\n for log,it,op,opid in data:\n for cur_op in chain([op],it):\n #cur_op None indicated end of that logfile.\n if cur_op is None: break\n\n #We ignore any ops until we see a 'startTxn' marker, but we\n # keep a record of there ids to ensure we see a later checkpoint.\n # if we don't we can't replay partial Txn.\n modlogger.debug( \"R:%s,%s\",cur_op,state)\n if state=='init':\n #Record all operations we see before we see the first\n #start tx marker.\n if cur_op.optype == b'start_txn':\n state='txcomplete'\n elif cur_op.optype == b'abort_txn':\n #If the partial transaction we found was aborted\n # we don't need to worry about its operations. \n unrcoverable = [ ]\n elif cur_op.optype == b'Checkpoint':\n unrecoverable = _remove_commited(unrecoverable,cur_op.opid)\n else:\n unrecoverable += [ op.opid]\n \n\n #We are looking for a starttxn, marker to mark the operation\n #as valid. The only other meaningful transaction in the\n #journal in the state is a checkpoint making which ops have been\n #detected as committed to the main store by the FS.\n if state=='txcomplete':\n if cur_op.optype == b'start_txn':\n tx = cur_op.txn_id\n txops = [ ]\n state = 'txstarted'\n continue\n elif cur_op.optype == b'Checkpoint':\n unrecoverable = _remove_commited(unrecoverable,cur_op.opid)\n else: raise RecoveryError(\"Operation outside tx\")\n\n #In this state all operations are meaningful.\n # we store all operations (except checkpoint) until we see\n # a EndTxn op. At the end TxnOp we synchronously complete\n # all operations.\n if state =='txstarted':\n if cur_op.optype == b'end_txn': \n #The test below finds 'overlapped' tx, (or ones missing a commit record\n #for some reason. 
This forces us not to accept this log file.\n if cur_op.txn_id != tx: raise RecoveryError(\"Non matching Tx commit found\")\n else:\n for top in txops:\n top.do(sync = True)\n state = 'txcomplete'\n elif cur_op.optype == b'abort_txn':\n state = 'txcomplete'\n elif cur_op.optype == b'Checkpoint':\n unrecoverable = _remove_commited(unrecoverable,cur_op.opid)\n else:\n txops += [ cur_op ] \n #Log file has been processed successfully - remove it from the Fs.\n #we could call close() here and reused the allocated space on the\n #FS - but the logfile is readonly - and close() adds a terminator\n #to mark the file as empty.\n try:\n log.unlink()\n except OSError: pass\n\n #If there are any partial txn's left we have failed to recover.\n if unrecoverable: raise RecoveryError(\"Partial uncommitted txn found\")", "def test_blog_rollback():", "def rollback_transaction(self, event=None):\n assert self._current_transaction\n\n # Store stacks\n undo_stack = list(self._undo_stack)\n\n erroneous_tx = self._current_transaction\n self._current_transaction = None\n try:\n with Transaction(self.event_manager):\n try:\n erroneous_tx.execute()\n except Exception as e:\n logger.error(\"Could not roolback transaction\")\n logger.error(e)\n finally:\n # Discard all data collected in the rollback \"transaction\"\n self._undo_stack = undo_stack\n\n self._action_executed()", "def rollback_action(args, kwargs, was_interrupted, result=None):\n raise NotImplementedError()", "def rollback(self, rollback_to):\n raise NotImplementedError", "def load_failed_tas():\n logger.info('Loading TAS Failing Edits')\n load_all_tas_failing_edits()", "def redo(self):\n if self._snapshot_index <= len(self._snapshots) - 2:\n snapshot = self._snapshots[self._snapshot_index + 1]\n for chunk_location in snapshot:\n dimension, cx, cz = chunk_location\n chunk = self._unserialise_chunk(dimension, cx, cz, 1)\n self._chunk_cache[chunk_location] = chunk\n self._snapshot_index += 1", "def test_migration_task_rollback(self):\n server, source_host, target_host = self._create_server()\n self._disable_target_host(target_host)\n self._stub_delete_server_during_scheduling(server)\n\n # Now start the cold migration which will fail due to NoValidHost.\n self.api.post_server_action(server['id'], {'migrate': None},\n check_response_status=[202])\n # We cannot monitor the migration from the API since it is deleted\n # when the instance is deleted so just wait for the failed instance\n # action event after the task rollback happens.\n # Note that we get InstanceNotFound rather than NoValidHost because\n # the NoValidHost handler in ComputeTaskManager._cold_migrate calls\n # _set_vm_state_and_notify which raises InstanceNotFound and masks\n # the NoValidHost error.\n self._assert_resize_migrate_action_fail(\n server, instance_actions.MIGRATE, 'InstanceNotFound')\n self._assert_no_allocations(server)", "def test_backup_restore_after_rebalance(self):\n serv_in = self.servers[self.nodes_init:self.nodes_init + self.nodes_in]\n serv_out = self.servers[self.nodes_init - self.nodes_out:self.nodes_init]\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create_validate()\n self.backupset.number_of_backups = 1\n rebalance = self.cluster.async_rebalance(self.cluster_to_backup, serv_in, serv_out)\n rebalance.result()\n self.backup_cluster_validate()\n if not self.same_cluster:\n self._initialize_nodes(Cluster(), self.input.clusters[0][:self.nodes_init])\n 
serv_in = self.input.clusters[0][self.nodes_init: self.nodes_init + self.nodes_in]\n serv_out = self.input.clusters[0][self.nodes_init - self.nodes_out: self.nodes_init]\n rebalance = self.cluster.async_rebalance(self.cluster_to_restore, serv_in, serv_out)\n else:\n rebalance = self.cluster.async_rebalance(self.cluster_to_restore, serv_out, serv_in)\n rebalance.result()\n self.backup_restore_validate(compare_uuid=False, seqno_compare_function=\"<=\")", "def rollback(self) -> None:\n if self._transaction is None:\n pass\n else:\n self._transaction.rollback(_to_root=True)", "def test_backup_restore_with_audit(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n audit_obj = audit(AUDITBACKUPID, self.backupset.cluster_host)\n status = audit_obj.getAuditStatus()\n self.log.info(\"Audit status on {0} is {1}\".format(self.backupset.cluster_host.ip, status))\n if not status:\n self.log.info(\"Enabling audit on {0}\".format(self.backupset.cluster_host.ip))\n audit_obj.setAuditEnable('true')\n self.backup_create()\n self.backup_cluster()\n field_verified, value_verified = audit_obj.validateEvents(self._get_event_expected_results(action='backup'))\n self.assertTrue(field_verified, \"One of the fields is not matching\")\n self.assertTrue(value_verified, \"Values for one of the fields is not matching\")\n audit_obj = audit(AUDITBACKUPID, self.backupset.restore_cluster_host)\n status = audit_obj.getAuditStatus()\n self.log.info(\"Audit status on {0} is {1}\".format(self.backupset.restore_cluster_host.ip, status))\n if not status:\n self.log.info(\"Enabling audit on {0}\".format(self.backupset.restore_cluster_host.ip))\n audit_obj.setAuditEnable('true')\n self.backup_restore()\n audit_obj = audit(AUDITRESTOREID, self.backupset.restore_cluster_host)\n field_verified, value_verified = audit_obj.validateEvents(self._get_event_expected_results(action='restore'))\n self.assertTrue(field_verified, \"One of the fields is not matching\")\n self.assertTrue(value_verified, \"Values for one of the fields is not matching\")", "def Rollback(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def rollback(self):\n if self.dbmi.__name__ == \"psycopg2\":\n if self.connected:\n self.connection.rollback()", "def update_22(db, filename_persist, snapshots_dir, snapshots_reference_dir):\n data = {\n # 'fail'\n 'test/test_pyglet_vb.py' : {\n 'st': 'fail', 'diag': 'incomplete grossini rendition at first frame'},\n\n # 'error'\n 'test/test_text_movement.py' : {\n 'st': 'error',\n 'diag': 'position should be set at the node level, not at the element level'},\n\n 'test/test_schedule_interval.py' : {\n 'st':'error', 'diag': 'bad timestamps, repeated snapshots'},\n\n 'test/test_transitions_with_pop_recipe.py' : {\n 'st':'error', 'diag': 'bad timestamps, repeated snapshots'},\n\n 'test/test_SequenceScene.py' : {\n 'st':'error', 'diag': 'bad timestamps, black frame'},\n\n 'test/test_camera_orbit.py' : {\n 'st':'error', 'diag': 'alternate snapshots are pure black'},\n\n 'test/test_jumptiles3d.py' : {\n 'st':'error', 'diag': \"snpshots don't folow changes in scene\"},\n\n 'test/test_transition_zoom.py' : {\n 'st':'error', 'diag': 'bad timestamps, repeated snapshots'},\n }\n\n ren_key = {'st':'testrun_success', 'diag':'testrun_diagnostic'}\n testrun_props_by_candidate = {}\n for name in data:\n testrun_props_by_candidate[name] = dict([(ren_key[k], data[name][k]) for k in data[name]])\n \n 
hl.update_testrun__bad(db, filename_persist, testrun_props_by_candidate,\n snapshots_dir, snapshots_reference_dir)", "def transaction_failed_before_processing(self):", "def rollback(self, steps=1) -> None:\n raise NotImplementedError(\n \"Open transactions are currently not supported. To reset commit head, check WOQLClient.reset\"\n )", "def test_restore_with_erlang_crash(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster()\n rest_conn = RestConnection(self.backupset.restore_cluster_host)\n rest_conn.create_bucket(bucket=\"default\", ramQuotaMB=512)\n try:\n restore_result = self.cluster.async_restore_cluster(backupset=self.backupset,\n objstore_provider=self.objstore_provider,\n no_progress_bar=self.no_progress_bar,\n cli_command_location=self.cli_command_location,\n cb_version=self.cb_version)\n conn = RemoteMachineShellConnection(self.backupset.restore_cluster_host)\n conn.kill_erlang(self.os_name)\n output = restore_result.result(timeout=300)\n self.assertTrue(self._check_output(\n \"Error restoring cluster: Not all data was sent to Couchbase\", output),\n \"Expected error message not thrown by Restore 180 seconds after erlang crash\")\n self.log.info(\"Expected error thrown by Restore 180 seconds after erlang crash\")\n except Exception as ex:\n self.fail(str(ex))\n finally:\n conn.start_couchbase()\n conn.disconnect()\n self.sleep(30)", "def rollback(self):\n\n if not self.is_active:\n return\n\n if self.is_context_active:\n raise states.RolledBack(self)\n else:\n self.__do_rollback()\n self._cleanup()", "def rollback(self):\n conn = self.threadingLocal.connection\n if isinstance(conn, Transaction) and not conn._obsolete:\n self.threadingLocal.connection.rollback()", "def merge(self, snapshot):\n\n if not self.__settings:\n return\n\n self.merge_metric_stats(snapshot)\n self._merge_transaction_events(snapshot)\n self._merge_synthetics_events(snapshot)\n self._merge_error_events(snapshot)\n self._merge_error_traces(snapshot)\n self._merge_custom_events(snapshot)\n self._merge_span_events(snapshot)\n self._merge_sql(snapshot)\n self._merge_traces(snapshot)", "def recover(self):\n if self.get_info_from_db():\n logger.info(\"Recover by reading previous results\")\n self.check_items(self.get_user_results_from_db())\n else:\n self.create_info_in_db() # create record in axdb", "def rollback(self, cursor):\n\t\twith warnings.catch_warnings():\n\t\t\twarnings.simplefilter(\"ignore\")\n\t\t\tconnection = cursor.connection\n\t\tconnection.rollback()\n\t\tself._close_cursor(cursor)", "def test_backup_restore_misc(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backupset.name = \"!@#$%^&\"\n output, error = self.backup_create()\n self.assertTrue(\"Backup `!@#$%^` created successfully\" in output[0],\n \"Backup could not be created with special characters\")\n self.log.info(\"Backup created with special characters\")\n self.backupset.name = \"backup\"\n self.backup_create()\n self.backup_cluster()\n conn = RemoteMachineShellConnection(self.backupset.backup_host)\n command = \"ls -tr {0}/{1}/{2} | tail\".format(self.backupset.directory, self.backupset.name, self.backups[0])\n o, e = conn.execute_command(command)\n data_dir = o[0]\n conn.execute_command(\"dd if=/dev/zero of=/tmp/entbackup/backup/\" +\n str(self.backups[0]) +\n \"/\" + 
data_dir + \"/data/shard_0.sqlite\" +\n \" bs=1024 count=100 seek=10 conv=notrunc\")\n output, error = self.backup_restore()\n self.assertTrue(\"Restore failed due to an internal issue, see logs for details\" in output[-1],\n \"Expected error not thrown when file is corrupt\")\n self.log.info(\"Expected error thrown when file is corrupted\")\n conn.execute_command(\"mv /tmp/entbackup/backup /tmp/entbackup/backup2\")\n conn.disconnect()\n output, error = self.backup_restore()\n self.assertTrue(\"Backup Repository `backup` not found\" in output[-1], \"Expected error message not thrown\")\n self.log.info(\"Expected error message thrown\")", "def undo(self):\n LOG.debug(\"In the undo method, will attempt to restore\")\n\n # validate detected nothing to do for this, nothing was done\n # for execute, so simply return\n if self.no_op:\n return\n\n if not self.source_dev or not self.target_dev:\n return\n LOG.debug(\"The source dictionary is: %s\", self.source_dict_restore)\n LOG.debug(\"The target dictionary is: %s\", self.target_dict_restore)\n\n # In scenario where no source IP Address...\n if self.source_dict_restore:\n self.commandex.send_ifcfg(self.source_dev,\n self.source_dict_restore)\n\n # May have failed because the ifcfg didn't even exist, nothing\n # to roll back then\n if self.target_dict_restore:\n self.commandex.send_ifcfg(self.target_dev,\n self.target_dict_restore)", "def rollback_workflow(self, execution_id):\n raise NotImplementedError", "def test_merge_backup_with_multi_threads(self):\n gen = BlobGenerator(\"ent-backup1\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.log.info(\"Start doing backup\")\n self.backup_create()\n self.backup_cluster()\n gen = BlobGenerator(\"ent-backup2\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_cluster(self.threads_count)\n self.backupset.number_of_backups += 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n self.log.info(\"Start to merge backup\")\n self.backupset.start = randrange(1, self.backupset.number_of_backups)\n if int(self.backupset.number_of_backups) == 2:\n self.backupset.end = 2\n elif int(self.backupset.number_of_backups) > 2:\n self.backupset.end = randrange(self.backupset.start,\n self.backupset.number_of_backups + 1)\n self.merged = True\n status, output, _ = self.backup_merge()\n self.backupset.end -= 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n current_vseqno = self.get_vbucket_seqnos(self.cluster_to_backup, self.buckets,\n self.skip_consistency, self.per_node)\n self.log.info(\"*** Start to validate data in merge backup \")\n self.validate_backup_data(self.backupset.backup_host, [self.master],\n \"ent-backup\", False, False, \"memory\",\n self.num_items, None)\n self.backup_cluster_validate(skip_backup=True)", "def rollback(self):\n self._session.rollback()\n\n return True", "def jobFailed(self, jobName):\n\n # ignore non merge jobs\n if jobName.find('mergejob') == -1:\n logging.info(\"Ignoring job %s, since it is not a merge job\" \\\n % jobName)\n # Add cleanup flag for non merge jobs too\n logging.info(\"trigger cleanup for: %s\" % jobName)\n try:\n self.trigger.setFlag(\"cleanup\", jobName, \"MergeAccountant\")\n except (ProdAgentException, ProdException):\n logging.error(\"trying to continue processing failure event\")\n return\n\n # files can be cleaned up now\n logging.info(\"trigger 
cleanup for: %s\" % jobName)\n\n try:\n self.trigger.setFlag(\"cleanup\", jobName, \"MergeAccountant\")\n except (ProdAgentException, ProdException):\n logging.error(\"trying to continue processing failure event\")\n\n # verify enable condition\n if not self.enabled:\n return\n\n # open a DB connection\n database = MergeSensorDB()\n\n # start a transaction\n database.startTransaction()\n\n # get job information\n try:\n jobInfo = database.getJobInfo(jobName)\n\n # cannot get it!\n except Exception, msg:\n logging.error(\"Cannot process Failure event for job %s: %s\" \\\n % (jobName, msg))\n database.closeDatabaseConnection()\n return\n\n # check that job exists\n if jobInfo is None:\n logging.error(\"Job %s does not exist.\" % jobName)\n database.closeDatabaseConnection()\n return\n\n # check status\n if jobInfo['status'] != 'undermerge':\n logging.error(\"Cannot process Failure event for job %s: %s\" \\\n % (jobName, \"the job is not currently running\"))\n database.closeDatabaseConnection()\n\n return\n\n # get dataset id\n datasetId = database.getDatasetId(jobInfo['datasetName'])\n\n # mark all input files as 'unmerged' (or 'invalid')\n unFinishedFiles = []\n for fileName in jobInfo['inputFiles']:\n\n # update status\n newStatus = database.updateInputFile(\\\n datasetId, fileName, \\\n status = \"unmerged\", \\\n maxAttempts = int(self.args['MaxInputAccessFailures']))\n\n # add invalid files to list of non finished files\n if newStatus == 'invalid':\n unFinishedFiles.append(fileName)\n\n # mark output file as 'failed'\n database.updateOutputFile(datasetId, jobName=jobName, status='failed')\n\n # commit changes\n database.commit()\n\n # notify the PM about the unrecoverable files\n if len(unFinishedFiles) > 0:\n File.merged(unFinishedFiles, True)\n\n # log message\n logging.info(\"Job %s failed, file information updated.\" % jobName)\n\n # close connection\n database.closeDatabaseConnection()", "def assert_tx_failed(ballot_tester, function_to_test, exception = tester.TransactionFailed):\n initial_state = ballot_tester.s.snapshot()\n ballot_tester.assertRaises(exception, function_to_test)\n ballot_tester.s.revert(initial_state)", "def savepoint_rollback(self, id):\n self.execute(\"ROLLBACK TO SAVEPOINT {}\".format(id))", "def Rollback(self, request, timeout, metadata=None, with_call=False, protocol_options=None):\n raise NotImplementedError()", "def rollback(self):\n\t\traise GeneratorException(\"Not implemented\")", "def VMRevertToCurrentSnapshot(self):\n statusCode = 0 # process exit code\n\n LOGGER.debug('Trying to revert virtual machine \"{}\" into current snapshot...'.format(VM_NAME))\n\n try:\n current = self.vmInstance.get_current_snapshot_name()\n LOGGER.info('Current snapshot: \"{}\"'.format(current))\n\n self.vmInstance.revert_to_snapshot()\n LOGGER.info('Virtual machine \"{}\" revert to current snapshot successful.'.format(VM_NAME))\n\n self.VMStatus()\n\n except Exception as e:\n statusCode = -1\n LOGGER.debug(e)\n LOGGER.error(traceback.format_exc())\n LOGGER.error('An error occured while revert virtual machine \"{}\" into current snapshot!'.format(VM_NAME))\n\n return statusCode", "def test_backup_merge(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self._take_n_backups(n=self.backupset.number_of_backups)\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n backup_count = 0\n \"\"\" remove last 6 chars of 
offset time in backup name\"\"\"\n if output and output[0]:\n bk_info = json.loads(output[0])\n bk_info = bk_info[\"repos\"][0]\n else:\n return False, \"No output content\"\n\n if bk_info[\"backups\"]:\n for i in range(0, len(bk_info[\"backups\"])):\n backup_name = bk_info[\"backups\"][i][\"date\"]\n if self.debug_logs:\n print(\"backup name \", backup_name)\n print(\"backup set \", self.backups)\n if backup_name in self.backups:\n backup_count += 1\n self.log.info(\"{0} matched in info command output\".format(backup_name))\n self.assertEqual(backup_count, len(self.backups), \"Initial number of backups did not match\")\n self.log.info(\"Initial number of backups matched\")\n self.backupset.start = randrange(1, self.backupset.number_of_backups)\n self.backupset.end = randrange(self.backupset.start + 1, self.backupset.number_of_backups + 1)\n status, output, message = self.backup_merge(check_for_panic=True)\n if not status:\n self.fail(message)\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n backup_count = 0\n if output and output[0]:\n bk_info = json.loads(output[0])\n bk_info = bk_info[\"repos\"][0]\n else:\n return False, \"No output content\"\n if bk_info[\"backups\"]:\n for i in range(0, len(bk_info[\"backups\"])):\n backup_name = bk_info[\"backups\"][i][\"date\"]\n if self.debug_logs:\n print(\"backup name \", backup_name)\n print(\"backup set \", self.backups)\n backup_count += 1\n if backup_name in self.backups:\n self.log.info(\"{0} matched in info command output\".format(backup_name))\n else:\n self.fail(\"Didn't expect backup date {0} from the info command output\" \\\n \" to be in self.backups (the list of exepected backup dates\" \\\n \" after a merge)\".format(backup_name))\n\n self.assertEqual(backup_count, len(self.backups), \"Merged number of backups did not match\")\n self.log.info(\"Merged number of backups matched\")", "def test_04_merge_into(self):\n client = self.client\n\n # Download existing machines.\n machine_1 = check_json(client, 'api/db_default/v4/nts/machines/1')\n machine_3 = check_json(client, 'api/db_default/v4/nts/machines/3')\n # The test is boring if we don't have at least 1 run in each machine.\n self.assertTrue(len(machine_1['runs']) > 0)\n self.assertTrue(len(machine_3['runs']) > 0)\n\n data = {\n 'action': 'merge',\n 'into': '3',\n }\n resp = client.post('api/db_default/v4/nts/machines/1', data=data,\n headers={'AuthToken': 'test_token'})\n self.assertEqual(resp.status_code, 200)\n\n # Old machine should have disappeared.\n resp_2 = client.get('api/db_default/v4/nts/machines/1')\n self.assertEqual(resp_2.status_code, 404)\n\n # The other machine should have the union of all runs.\n machine_1['runs'] = [_hashabledict(run) for run in machine_1['runs']]\n machine_3['runs'] = [_hashabledict(run) for run in machine_3['runs']]\n allruns = set(machine_1['runs']).union(machine_3['runs'])\n resp_3 = check_json(client, 'api/db_default/v4/nts/machines/3')\n resp_3['runs'] = [_hashabledict(run) for run in resp_3['runs']]\n self.assertEqual(set(resp_3['runs']), allruns)", "def test_rollback_deadlock(self):\n\n conn1 = testing.db.connect()\n conn2 = testing.db.connect()\n users = Table(\n \"deadlock_users\",\n metadata,\n Column(\"user_id\", INT, primary_key=True),\n Column(\"user_name\", VARCHAR(20)),\n test_needs_acid=True,\n )\n with conn1.begin():\n users.create(conn1)\n conn1.exec_driver_sql(\"select * from deadlock_users\")\n conn1.close()\n\n # without auto-rollback in the connection pool's return() logic,\n # this 
deadlocks in PostgreSQL, because conn1 is returned to the\n # pool but still has a lock on \"deadlock_users\". comment out the\n # rollback in pool/ConnectionFairy._close() to see !\n\n with conn2.begin():\n users.drop(conn2)\n conn2.close()", "def performDeltaLoad(df: DataFrame, path: str, pkey: str) -> Dict:\n try:\n oldData = DeltaTable.forPath(spark, path)\n dataToInsert = (\n df.alias(\"updates\")\n .join(oldData.toDF().alias(\"oldData\"), pkey)\n .where(\"oldData.endDate is null AND updates.hashData <> oldData.hashData\")\n )\n stagedUpdates = dataToInsert.selectExpr(\"NULL as mergeKey\", \"updates.*\").union(\n df.alias(\"updates\").selectExpr(\"updates.{0} as mergeKey\".format(pkey), \"*\")\n )\n logger.info(\"Upsert Started\")\n oldData.alias(\"oldData\").merge(\n stagedUpdates.alias(\"staged_updates\"),\n \"oldData.endDate is null and oldData.{0} = mergeKey\".format(pkey),\n ).whenMatchedUpdate(\n condition=\" oldData.hashData <> staged_updates.hashData\",\n set={\"endDate\": \"staged_updates.startDate\"},\n ).whenNotMatchedInsert(\n values={\n \"{0}\".format(str(col_name)): \"staged_updates.{0}\".format(str(col_name))\n for col_name in stagedUpdates.columns\n if col_name not in \"mergeKey\"\n }\n ).execute()\n # logger.info(\"Upsert Completed\")\n return {\"status\": \"Success\", \"message\": \"\"}\n except Exception as e:\n # logger.error(\n # \"Failed while performing the Increamental load for {0} error : {1}\".format(\n # path, str(e)[:100]\n # )\n # )\n return {\"status\": \"Failed\", \"message\": str(e)}\n # raise DLoaderException(\n # \"Failed while loading the incremental data into delta table : {0}\".format(e)\n # )", "def rollback(self, block: 'Block') -> None:\n # Check for block validation before rollback\n self._precommit_data_manager.validate_precommit_block(block)\n self._precommit_data_manager.rollback(block)", "def rollback_transaction(self):\n cursor = self._cursor()\n cursor.close()\n self._db.rollback()\n self._end_transaction()", "def test_base_replica_repair_with_contention(self):\n self._base_replica_repair_test(fail_mv_lock=True)", "def process_exception(self, request, exception):\r\n if transaction.is_dirty():\r\n transaction.rollback()\r\n transaction.leave_transaction_management()", "def RestoreSnapshot(\r\n self,\r\n snapshot_data: Any,\r\n ):\r\n\r\n (\r\n self_id,\r\n iter,\r\n num_results,\r\n ignore_whitespace_ctr,\r\n ) = snapshot_data\r\n\r\n assert self_id == id(self)\r\n assert iter.Offset <= self.normalized_iter.Offset\r\n assert num_results <= len(self.results)\r\n assert ignore_whitespace_ctr <= self._ignore_whitespace_ctr\r\n\r\n self.normalized_iter = iter\r\n self._ignore_whitespace_ctr = ignore_whitespace_ctr\r\n\r\n if len(self.results) != num_results:\r\n del self.results[num_results - len(self.results):]", "def rollback(folder_name, with_subfolders):\n process_backups(folder_name, with_subfolders, lambda x: copy2(x, x[:-4]))", "def test_decommit_after_battle(self):\n sess = self.sess\n self.battle.submission_id = \"TEST\" # So update_all will work correctly\n\n old = self.alice.committed_loyalists\n self.battle.create_skirmish(self.alice, 5)\n\n # And just like that, the battle's over\n self.end_battle()\n\n self.assertEqual(self.alice.committed_loyalists, old)", "def test_unlock_failure(self):\n # Make sure the image file doesn't exist.\n if os.path.exists(IMAGE_FILE):\n os.unlink(IMAGE_FILE)\n # Ask rsync-system-backup to use the encrypted filesystem on the image\n # file anyway, because we know it will fail and that's exactly 
what\n # we're interested in :-).\n program = RsyncSystemBackup(\n crypto_device=CRYPTO_NAME,\n destination=os.path.join(MOUNT_POINT, 'latest'),\n mount_point=MOUNT_POINT,\n )\n # When `cryptdisks_start' fails it should exit with a nonzero exit\n # code, thereby causing executor to raise an ExternalCommandFailed\n # exception that obscures the FailedToUnlockError exception that we're\n # interested in. The check=False option enables our `last resort error\n # handling' code path to be reached.\n program.destination_context.options['check'] = False\n self.assertRaises(FailedToUnlockError, program.execute)", "def test_failure_snapshots_on(pipeline_builder, sdc_executor):\n pipeline = pipeline_builder.build('Failure Snapshots ON')\n pipeline.configuration['shouldCreateFailureSnapshot'] = True\n pipeline.configuration['shouldRetry'] = False\n\n sdc_executor.add_pipeline(pipeline)\n sdc_executor.start_pipeline(pipeline, wait=False).wait_for_status('RUN_ERROR', ignore_errors=True)\n\n snapshots = sdc_executor.get_snapshots(pipeline)\n\n assert len(snapshots) == 1", "def test_restore_with_memcached_crash(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster()\n rest_conn = RestConnection(self.backupset.restore_cluster_host)\n rest_conn.create_bucket(bucket=\"default\", ramQuotaMB=512)\n try:\n conn = RemoteMachineShellConnection(self.backupset.restore_cluster_host)\n conn.pause_memcached(self.os_name)\n output, error = self.backup_restore()\n self.assertTrue(self._check_output(\n \"Error restoring cluster: failed to connect\", output),\n \"Expected error message not thrown by Restore 180 seconds after memcached crash\")\n self.log.info(\"Expected error thrown by Restore 180 seconds after memcached crash\")\n except Exception as ex:\n self.fail(str(ex))\n finally:\n conn.unpause_memcached(self.os_name)\n conn.disconnect()\n self.sleep(30)", "def test_failure_snapshots_on_multiple(pipeline_builder, sdc_executor):\n pipeline = pipeline_builder.build('Failure Snapshots ON (Multiple failures)')\n pipeline.configuration['shouldCreateFailureSnapshot'] = True\n pipeline.configuration['shouldRetry'] = False\n\n sdc_executor.add_pipeline(pipeline)\n # Run and let the pipeline fail at least twice\n sdc_executor.start_pipeline(pipeline, wait=False).wait_for_status('RUN_ERROR', ignore_errors=True)\n sdc_executor.start_pipeline(pipeline, wait=False).wait_for_status('RUN_ERROR', ignore_errors=True)\n\n snapshots = sdc_executor.get_snapshots(pipeline)\n\n assert len(snapshots) == 1", "def _loadBackTxn(self, oid, back, fail=True):\n return self._loadBack_impl(oid, back, fail)[:2]", "def test_restore_with_erlang_crash_and_restart(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster()\n rest_conn = RestConnection(self.backupset.restore_cluster_host)\n rest_conn.create_bucket(bucket=\"default\", ramQuotaMB=512)\n restore_result = self.cluster.async_restore_cluster(backupset=self.backupset,\n objstore_provider=self.objstore_provider,\n no_progress_bar=self.no_progress_bar,\n cli_command_location=self.cli_command_location,\n cb_version=self.cb_version)\n self.sleep(10)\n conn = RemoteMachineShellConnection(self.backupset.restore_cluster_host)\n conn.kill_erlang(self.os_name)\n conn.start_couchbase()\n conn.disconnect()\n 
timeout_now = 600\n output = restore_result.result(timeout=timeout_now)\n self.assertTrue(self._check_output(\"Restore completed successfully\", output),\n \"Restore failed with erlang crash and restart within 180 seconds\")\n self.log.info(\"Restore succeeded with erlang crash and restart within 180 seconds\")", "def doRestore(self):\n self.logger.log(\"Begin to restore instance status...\")\n \n try:\n self.readConfigInfo()\n self.getUserInfo()\n \n # dump status to file\n cmd = ClusterCommand.getQueryStatusCmd(self.user, self.dbNodeInfo.id, self.__curStatusFile)\n (status, output) = commands.getstatusoutput(cmd)\n if (status != 0):\n self.logger.logExit(\"Query local instance status failed!Error: %s\" % output)\n \n bakDbStatus = DbClusterStatus()\n bakDbStatus.initFromFile(self.__bakStatusFile)\n bakNodeStatus = bakDbStatus.getDbNodeStatusById(self.dbNodeInfo.id)\n if (bakNodeStatus is None):\n self.logger.logExit(\"Get backup status of local node failed!\")\n \n curDbStatus = DbClusterStatus()\n curDbStatus.initFromFile(self.__curStatusFile)\n curNodeStatus = curDbStatus.getDbNodeStatusById(self.dbNodeInfo.id)\n if (curNodeStatus is None):\n self.logger.logExit(\"Get current status of local node failed!\")\n if (not curNodeStatus.isNodeHealthy()):\n self.logger.logExit(\"Current status of node is not healthy!\")\n \n # Compare the status and restore it\n bakInstances = bakNodeStatus.datanodes + bakNodeStatus.gtms\n for bakInst in bakInstances:\n curInst = curNodeStatus.getInstanceByDir(bakInst.datadir)\n if (curInst is None):\n self.logger.logExit(\"Get current status of instance failed!DataDir:%s\" % bakInst.datadir)\n \n if (bakInst.status == curInst.status):\n continue\n \n if (bakInst.status == DbClusterStatus.INSTANCE_STATUS_PRIMARY):\n self.__switchToPrimary(bakInst.datadir)\n elif (bakInst.status == DbClusterStatus.INSTANCE_STATUS_STANDBY):\n self.__switchToStandby(bakInst.datadir)\n \n except Exception, e:\n self.logger.logExit(str(e))\n \n self.logger.log(\"Restore instance status successfully.\")\n self.logger.closeLog()", "def test_repair_hive_table_failed_refresh(self, mock_logging):\n self.client.athena_client = MockAthenaClient(result_state='FAILED')\n\n # This bucket is not in our `repair_hive_table` config map\n self.client.repair_hive_table({'unit-testing.streamalerts'})\n assert_true(mock_logging.error.called)", "def rollback(obj, commit):\n copy = deepcopy(obj)\n if commit.uses_slots is False:\n copy.__dict__ = commit.state\n else:\n for k, v in commit.state.items():\n setattr(copy, k, v)\n\n return copy", "def _test_base_view_consistency_on_crash(self, fail_phase):\n\n self.cluster.set_batch_commitlog(enabled=True, use_batch_window = self.cluster.version() < '5.0')\n self.fixture_dtest_setup.ignore_log_patterns = [r'Dummy failure', r\"Failed to force-recycle all segments\"]\n self.prepare(rf=1, install_byteman=True)\n node1, node2, node3 = self.cluster.nodelist()\n session = self.patient_exclusive_cql_connection(node1)\n session.execute('USE ks')\n\n session.execute(\"CREATE TABLE t (id int PRIMARY KEY, v int, v2 text, v3 decimal)\")\n session.execute((\"CREATE MATERIALIZED VIEW t_by_v AS SELECT * FROM t \"\n \"WHERE v IS NOT NULL AND id IS NOT NULL PRIMARY KEY (v, id)\"))\n\n session.cluster.control_connection.wait_for_schema_agreement()\n\n logger.debug('Make node1 fail {} view writes'.format(fail_phase))\n node1.byteman_submit([mk_bman_path('fail_{}_view_write.btm'.format(fail_phase))])\n\n logger.debug('Write 1000 rows - all node1 writes should fail')\n\n 
failed = False\n for i in range(1, 1000):\n try:\n session.execute(\"INSERT INTO t (id, v, v2, v3) VALUES ({v}, {v}, 'a', 3.0) USING TIMESTAMP {v}\".format(v=i))\n except WriteFailure:\n failed = True\n\n assert failed, \"Should fail at least once.\"\n assert node1.grep_log(\"Dummy failure\"), \"Should throw Dummy failure\"\n\n missing_entries = 0\n session = self.patient_exclusive_cql_connection(node1)\n session.execute('USE ks')\n for i in range(1, 1000):\n view_entry = rows_to_list(session.execute(SimpleStatement(\"SELECT * FROM t_by_v WHERE id = {} AND v = {}\".format(i, i),\n consistency_level=ConsistencyLevel.ONE)))\n base_entry = rows_to_list(session.execute(SimpleStatement(\"SELECT * FROM t WHERE id = {}\".format(i),\n consistency_level=ConsistencyLevel.ONE)))\n\n if not base_entry:\n missing_entries += 1\n if not view_entry:\n missing_entries += 1\n\n logger.debug(\"Missing entries {}\".format(missing_entries))\n assert missing_entries > 0\n\n logger.debug('Restarting node1 to ensure commit log is replayed')\n node1.stop(wait_other_notice=True)\n # Set batchlog.replay_timeout_seconds=1 so we can ensure batchlog will be replayed below\n node1.start(jvm_args=[\"-Dcassandra.batchlog.replay_timeout_in_ms=1\"])\n\n logger.debug('Replay batchlogs')\n time.sleep(0.001) # Wait batchlog.replay_timeout_in_ms=1 (ms)\n self._replay_batchlogs()\n\n logger.debug('Verify that both the base table entry and view are present after commit and batchlog replay')\n session = self.patient_exclusive_cql_connection(node1)\n session.execute('USE ks')\n for i in range(1, 1000):\n view_entry = rows_to_list(session.execute(SimpleStatement(\"SELECT * FROM t_by_v WHERE id = {} AND v = {}\".format(i, i),\n consistency_level=ConsistencyLevel.ONE)))\n base_entry = rows_to_list(session.execute(SimpleStatement(\"SELECT * FROM t WHERE id = {}\".format(i),\n consistency_level=ConsistencyLevel.ONE)))\n\n assert base_entry, \"Both base {} and view entry {} should exist.\".format(base_entry, view_entry)\n assert view_entry, \"Both base {} and view entry {} should exist.\".format(base_entry, view_entry)", "def cleanup(self):\n self._restore_als_disable()\n self._set_brightness_percent(self._original_brightness)\n\n # Check results to make sure backlight levels were preserved across\n # transition events.\n num_failed = 0\n for test_name in self._results:\n old_brightness = self._results[test_name]['old']\n new_brightness = self._results[test_name]['new']\n\n if old_brightness == new_brightness:\n logging.info('Transition event [ PASSED ]: %s', test_name)\n else:\n logging.info('Transition event [ FAILED ]: %s', test_name)\n logging.info(' Brightness changed: %d -> %d',\n old_brightness, new_brightness)\n num_failed += 1\n\n if num_failed > 0:\n raise error.TestFail(('Failed to preserve backlight over %d '\n 'transition event(s).') % num_failed)", "def vm_snapshot_revert(self, vm_moid, snap_moid):\n result = DataResult()\n try:\n snap_mor = self._get_vm_snapshot_mor(vm_moid, snap_moid)\n task_mor = snap_mor.RevertToSnapshot_Task()\n result.task_key = task_mor._moId\n except Exception as ex:\n LOG.exception(ex)\n result.status = False\n result.message = \"Revert snapshot of the vm error: %s\" % str(ex)\n return result", "def transaction_failed(self):", "def test_auto_rollback(self):\n self.mocked_cursor.execute.side_effect = psycopg2.Error('testing')\n\n db = database.Database()\n try:\n db.execute(sql=\"SELECT * from FOO WHERE bar LIKE 'baz'\")\n except database.DatabaseError:\n pass\n\n 
self.assertEqual(self.mocked_connection.rollback.call_count, 1)", "def test_backup_restore_sanity(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self.log.info(\"*** start to load items to all buckets\")\n self._load_all_buckets(self.master, gen, \"create\", self.expires)\n self.log.info(\"*** done to load items to all buckets\")\n self.ops_type = self.input.param(\"ops-type\", \"update\")\n self.expected_error = self.input.param(\"expected_error\", None)\n if self.auto_failover:\n self.log.info(\"Enabling auto failover on \" + str(self.backupset.cluster_host))\n rest_conn = RestConnection(self.backupset.cluster_host)\n rest_conn.update_autofailover_settings(self.auto_failover, self.auto_failover_timeout)\n self.backup_create_validate()\n for i in range(1, self.backupset.number_of_backups + 1):\n if self.ops_type == \"update\":\n self.log.info(\"*** start to update items in all buckets\")\n self._load_all_buckets(self.master, gen, \"update\", self.expires)\n self.log.info(\"*** done update items in all buckets\")\n elif self.ops_type == \"delete\":\n self.log.info(\"*** start to delete items in all buckets\")\n self._load_all_buckets(self.master, gen, \"delete\", self.expires)\n self.log.info(\"*** done to delete items in all buckets\")\n self.sleep(10)\n self.log.info(\"*** start to validate backup cluster\")\n self.backup_cluster_validate()\n self.targetMaster = True\n start = randrange(1, self.backupset.number_of_backups + 1)\n if start == self.backupset.number_of_backups:\n end = start\n else:\n end = randrange(start, self.backupset.number_of_backups + 1)\n self.log.info(\"*** start to restore cluster\")\n restored = {\"{0}/{1}\".format(start, end): \"\"}\n for i in range(1, self.backupset.number_of_backups + 1):\n if self.reset_restore_cluster:\n self.log.info(\"\\n*** start to reset cluster\")\n self.backup_reset_clusters(self.cluster_to_restore)\n cmd_init = 'node-init'\n if self.same_cluster:\n self.log.info(\"Same cluster\")\n self._initialize_nodes(Cluster(), self.servers[:self.nodes_init])\n if self.hostname and self.master.ip.endswith(\".com\"):\n options = '--node-init-hostname ' + self.master.ip\n shell = RemoteMachineShellConnection(self.master)\n output, _ = shell.execute_couchbase_cli(cli_command=cmd_init,\n options=options,\n cluster_host=\"localhost\",\n user=self.master.rest_username,\n password=self.master.rest_password)\n shell.disconnect()\n if not self._check_output(\"SUCCESS: Node initialize\", output):\n raise(\"Failed to set hostname\")\n else:\n self.log.info(\"Different cluster\")\n shell = RemoteMachineShellConnection(self.backupset.restore_cluster_host)\n shell.enable_diag_eval_on_non_local_hosts()\n rest = RestConnection(self.backupset.restore_cluster_host)\n rest.force_eject_node()\n rest.init_node()\n if self.hostname and self.backupset.restore_cluster_host.ip.endswith(\".com\"):\n options = '--node-init-hostname ' + self.backupset.restore_cluster_host.ip\n output, _ = shell.execute_couchbase_cli(cli_command=cmd_init, options=options,\n cluster_host=\"localhost\",\n user=self.backupset.restore_cluster_host.rest_username,\n password=self.backupset.restore_cluster_host.rest_password)\n if not self._check_output(\"SUCCESS: Node initialize\", output):\n raise(\"Failed to set hostname\")\n shell.disconnect()\n self.log.info(\"\\n*** Done reset cluster\")\n self.sleep(10)\n\n \"\"\" Add built-in user cbadminbucket to second cluster \"\"\"\n 
self.add_built_in_server_user(node=self.input.clusters[0][:self.nodes_init][0])\n\n self.backupset.start = start\n self.backupset.end = end\n self.log.info(\"*** start restore validation\")\n self.backup_restore_validate(compare_uuid=False,\n seqno_compare_function=\">=\",\n expected_error=self.expected_error)\n if self.backupset.number_of_backups == 1:\n continue\n while \"{0}/{1}\".format(start, end) in restored:\n start = randrange(1, self.backupset.number_of_backups + 1)\n if start == self.backupset.number_of_backups:\n end = start\n else:\n end = randrange(start, self.backupset.number_of_backups + 1)\n restored[\"{0}/{1}\".format(start, end)] = \"\"" ]
[ "0.6461394", "0.60570455", "0.5918345", "0.5801982", "0.57985705", "0.56479573", "0.5587846", "0.5585826", "0.55499566", "0.55391157", "0.55083054", "0.54737455", "0.54621094", "0.5453677", "0.54407775", "0.5388638", "0.5380963", "0.53626865", "0.53533244", "0.53509045", "0.5342233", "0.5339826", "0.5336493", "0.53087234", "0.53017044", "0.52920604", "0.5272769", "0.5270568", "0.5263331", "0.52626574", "0.5245591", "0.52440184", "0.52408963", "0.5233131", "0.5192178", "0.5179171", "0.5175977", "0.5138807", "0.5138606", "0.51362145", "0.5134753", "0.51295084", "0.5122097", "0.51012784", "0.51009244", "0.5099277", "0.50990593", "0.50719255", "0.50615823", "0.5038115", "0.5012666", "0.5007534", "0.4991485", "0.49854752", "0.49739826", "0.49738264", "0.49735734", "0.4965465", "0.4959097", "0.49468347", "0.49391347", "0.4910414", "0.49058095", "0.48958185", "0.48955432", "0.48896164", "0.4886531", "0.48855042", "0.48800585", "0.48667824", "0.48540896", "0.4851714", "0.48510778", "0.48496482", "0.48455727", "0.48434937", "0.48325565", "0.4825131", "0.48248932", "0.48188207", "0.48184985", "0.48176503", "0.48171553", "0.48144373", "0.48140717", "0.48086444", "0.4804836", "0.48038304", "0.48035532", "0.4798532", "0.47929028", "0.47869506", "0.47853842", "0.47851253", "0.47828975", "0.47682396", "0.4768062", "0.47649774", "0.47646543", "0.47634357" ]
0.77395844
0
Merges metric data from a snapshot. This is used both when merging data from a single transaction into the main stats engine, and for performing a rollback merge. In either case, the merge is done the exact same way.
def merge_metric_stats(self, snapshot):

        if not self.__settings:
            return

        for key, other in six.iteritems(snapshot.__stats_table):
            stats = self.__stats_table.get(key)
            if not stats:
                self.__stats_table[key] = other
            else:
                stats.merge_stats(other)
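The positive document above is easier to follow with a toy version of the pattern it implements: a per-metric stats table where missing keys are adopted wholesale and existing keys are merged value-by-value. The sketch below is illustrative only; SimpleStats and its fields are assumptions standing in for the agent's real TimeStats values, which this dataset row does not include.

# Minimal sketch of the stats-table merge pattern, assuming a simplified
# stats object of (count, total, minimum, maximum). Not the agent's API.

class SimpleStats:
    def __init__(self, count=0, total=0.0, minimum=0.0, maximum=0.0):
        self.count = count
        self.total = total
        self.minimum = minimum
        self.maximum = maximum

    def merge_stats(self, other):
        # The minimum is only meaningful once this side has recorded a call,
        # so take the other side's minimum when our count is still zero.
        self.minimum = min(self.minimum, other.minimum) if self.count else other.minimum
        self.maximum = max(self.maximum, other.maximum)
        self.count += other.count
        self.total += other.total


def merge_metric_stats(dest_table, snapshot_table):
    # Keys absent from the destination are adopted as-is; existing keys are
    # merged in place, mirroring the document above.
    for key, other in snapshot_table.items():
        stats = dest_table.get(key)
        if stats is None:
            dest_table[key] = other
        else:
            stats.merge_stats(other)


# Usage: merging one transaction's snapshot table into the main table.
table = {('WebTransaction/Uri/index', ''): SimpleStats(1, 0.25, 0.25, 0.25)}
merge_metric_stats(table, {('WebTransaction/Uri/index', ''): SimpleStats(1, 0.75, 0.75, 0.75)})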
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def merge(self, snapshot):\n\n if not self.__settings:\n return\n\n self.merge_metric_stats(snapshot)\n self._merge_transaction_events(snapshot)\n self._merge_synthetics_events(snapshot)\n self._merge_error_events(snapshot)\n self._merge_error_traces(snapshot)\n self._merge_custom_events(snapshot)\n self._merge_span_events(snapshot)\n self._merge_sql(snapshot)\n self._merge_traces(snapshot)", "def rollback(self, snapshot):\n\n if not self.__settings:\n return\n\n _logger.debug('Performing rollback of data into '\n 'subsequent harvest period. Metric data and transaction events'\n 'will be preserved and rolled into next harvest')\n\n self.merge_metric_stats(snapshot)\n self._merge_transaction_events(snapshot, rollback=True)\n self._merge_synthetics_events(snapshot, rollback=True)\n self._merge_error_events(snapshot)\n self._merge_custom_events(snapshot, rollback=True)\n self._merge_span_events(snapshot, rollback=True)", "def merge_custom_metrics(self, metrics):\n\n if not self.__settings:\n return\n\n for name, other in metrics:\n key = (name, '')\n stats = self.__stats_table.get(key)\n if not stats:\n self.__stats_table[key] = other\n else:\n stats.merge_stats(other)", "def _merge_report(self, target, new):\n time = None\n if 'ts' in new['parsed']:\n time = new['parsed']['ts']\n\n if (target.get('lastSeenDate', None) and\n time and\n target['lastSeenDate'] < time):\n target['lastSeenDate'] = time\n\n query_millis = int(new['parsed']['stats']['millis'])\n target['stats']['totalTimeMillis'] += query_millis\n target['stats']['count'] += 1\n target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']", "def _resample_and_merge(ts, agg_dict):\n grouped = ts.group_serie(agg_dict['sampling'])\n existing = agg_dict.get('return')\n name = agg_dict.get(\"name\")\n resource = None if name is None else mock.Mock(id=str(uuid.uuid4()))\n metric = mock.Mock(id=str(uuid.uuid4()), name=name)\n agg_dict['return'] = (\n processor.MetricReference(metric, \"mean\", resource),\n carbonara.AggregatedTimeSerie.from_grouped_serie(\n grouped,\n carbonara.Aggregation(agg_dict['agg'],\n agg_dict['sampling'],\n None)))\n if existing:\n existing[2].merge(agg_dict['return'][2])\n agg_dict['return'] = existing", "def _merge_report(self, target, new):\r\n time = None\r\n if 'ts' in new['parsed']:\r\n time = new['parsed']['ts']\r\n\r\n if (target.get('lastSeenDate', None) and\r\n time and\r\n target['lastSeenDate'] < time):\r\n target['lastSeenDate'] = time\r\n\r\n query_millis = int(new['parsed']['stats']['millis'])\r\n target['stats']['totalTimeMillis'] += query_millis\r\n target['stats']['count'] += 1\r\n target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']", "def merge(self, dataset):\n def merge_data(source, dest):\n for key, value in source.items():\n if isinstance(value, dict):\n merge_data(value, dest.setdefault(key, {}))\n else:\n dest[key] = value\n return dest\n\n merge_data(dataset.data, self._data)\n\n for h in dataset.task_history:\n if h not in self._task_history:\n self._task_history.append(h)", "def put_snapshot_object(session, key, data, snapshot=None):\n # type: (Session, Text, Any, Optional[Text]) -> None\n url_tail = \"/{}/{}/{}/{}/{}\".format(\n CoordConstsV2.RSC_NETWORKS,\n session.network,\n CoordConstsV2.RSC_SNAPSHOTS,\n session.get_snapshot(snapshot),\n CoordConstsV2.RSC_OBJECTS,\n )\n _put_stream(session, url_tail, data, {CoordConstsV2.QP_KEY: key})", "def merge_snapshot(self):\n disks = self.get_disks()\n 
disk_files_tree = []\n for disk in disks:\n disk_files_tree += (DiskImageHelper.get_backing_files_tree(disk.file))\n merge_snapshot_cmd = \"virsh blockpull --domain {domain_name} {disk_path} --wait\".format(\n domain_name=self.name, disk_path=disk.file)\n\n logging.debug(\"Executing: '%s'\" % merge_snapshot_cmd)\n logging.info(\"Merging base to new snapshot for '%s' device\" % disk.device)\n\n # launch command\n merge_snapshot_cmds = shlex.split(merge_snapshot_cmd)\n merge_snapshot = subprocess.Popen(merge_snapshot_cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n shell=False)\n\n # wait to terminate\n status = merge_snapshot.wait()\n\n if status != 0:\n logging.error(\"Error for '%s': %s\" % (merge_snapshot_cmd, merge_snapshot.stderr.read()))\n logging.critical(\"{exe} returned {status} state\".format(exe=merge_snapshot_cmds[0], status=status))\n raise Exception(\"blockpull didn't work properly\")\n\n current_disk_files = [disk.file for disk in self.get_disks()]\n\n # remove old disk device files without current ones\n for file in [disk_file_tree for disk_file_tree in disk_files_tree if disk_file_tree not in current_disk_files]:\n logging.info(\"Removing old disk file: '%s'\" % file)\n os.remove(file)", "def load_snapshot(base_path, snap_num, subvolumes, group, fields, matches):\n n_init = []\n\n snap_key = 'N{}_ThisFile_Redshift'.format('groups' if group == 'Haloprop' else 'subgroups')\n for subvolume in subvolumes: \n n_init.append(load_header(base_path, subvolume)[snap_key][snap_num])\n \n # initialize objects structure\n result = {}\n \n with h5py.File(file_path(base_path, subvolumes[0], 'subvolume'), 'r') as f:\n # galprop and haloprop both have a redshift quantity so we can use that to query for the snapshot we want\n filter_field = '{}Redshift'.format(group)\n \n if not fields:\n fields = list(f[group].keys())\n\n # make sure the redshift field is included in fields\n if filter_field not in fields:\n fields.append(filter_field) \n \n for field in fields:\n if field not in f[group].keys():\n raise Exception(\"Catalog does not have requested field [{}]!\".format(field))\n\n shape = list(f[group][field].shape)\n shape[0] = np.sum(n_init)\n\n # allocate within return dict\n result[field] = np.zeros(shape, dtype=f[group][field].dtype)\n\n if matches:\n with h5py.File(file_path(base_path, subvolumes[0], 'matches'), 'r') as f:\n for field in f[group].keys():\n result[field] = np.zeros(shape, dtype=f[group][field].dtype)\n\n header = load_header(base_path, subvolumes[0])\n filter_condition = header['Redshifts'][snap_num]\n\n offset = 0\n\n for subvolume in subvolumes:\n subvol_result = load_subvolume(base_path, subvolume, group, fields, matches, False)\n\n idx = subvol_result[filter_field][:] == filter_condition\n\n for field in subvol_result.keys():\n if len(subvol_result[field].shape) != 1:\n result[field][offset:offset+n_init[0], :] = subvol_result[field][idx]\n else:\n result[field][offset:offset+n_init[0]] = subvol_result[field][idx]\n\n offset += n_init[0]\n del n_init[0]\n \n return result", "def mergeAggregatedCsvData(self, contexts, obj, aggData1, aggData2):\n return aggData1 + aggData2", "def snapshot(snapshot_type, result_q, time_delta):", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.name = self.snapshot[0]\r\n self.size = self.snapshot[1]\r\n self.cells = self.snapshot[2]\r\n self.bucket_array.load_snapshot()", "def reduce_data():\n snapshots = Snapshot.objects.all()\n locations = Location.objects.all()\n lst = []\n for snapshot in snapshots:\n 
lst.append([snapshot.location.name, snapshot.avail_bikes,\n snapshot.free_stands, snapshot.timestamp])\n cols = ['location', 'avail_bikes', 'free_stands', 'timestamp']\n df = pd.DataFrame(lst, columns=cols)\n df['time'] = df['timestamp'].dt.round('30min').dt.strftime('%H:%M')\n\n group = df.groupby(['location', 'time'])\n means = group.mean()\n sd = group.std()\n today = date.today()\n first = today.replace(day=1)\n last_month = first - timedelta(days=1)\n\n for name, time in means.index:\n subset_mean = means.xs((name, time), level=(0, 1), axis=0)\n subset_sd = sd.xs((name, time), level=(0, 1), axis=0)\n m = Stat.objects.get_or_create(\n location=locations.get(name=name),\n avail_bikes_mean=subset_mean['avail_bikes'],\n free_stands_mean=subset_mean['free_stands'],\n avail_bikes_sd=subset_sd['avail_bikes'],\n free_stands_sd=subset_sd['free_stands'],\n time=time,\n month=last_month\n )\n\n # snaps = Snapshot.objects.all()\n # i = 0\n # length = len(snaps)\n # for s in snaps:\n # i += 1\n # print(i)\n # if i > 35000:\n # s.save()\n # reduce_data()", "def _aggregate_log_values(self, source, dest):\n remove = []\n for key, item in source.items():\n if \"data\" not in item:\n # Assume it's a sub-group\n dest[key] = {}\n self._aggregate_log_values(item, dest[key])\n else:\n aggregator = self._get_aggregator_for_key(key, item['agg'])\n value = aggregator(item['data'])\n if item['precision'] is not None:\n value = round(value, item['precision'])\n dest[key] = value\n if item['scope'] == 'get':\n remove.append(key)\n for key in remove:\n del source[key]", "def test_backup_merge_with_restore(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self._take_n_backups(n=2)\n self.backupset.start = 1\n self.backupset.end = 2\n output, error = self.backup_restore()\n if error:\n self.fail(\"Restoring backup failed: {0}\".format(error))\n self.log.info(\"Finished restoring backup before merging\")\n status, output, message = self.backup_merge()\n if not status:\n self.fail(message)\n self.backupset.start = 1\n self.backupset.end = 1\n rest = RestConnection(self.backupset.restore_cluster_host)\n rest.flush_bucket()\n output, error = self.backup_restore()\n if error:\n self.fail(\"Restoring backup failed\")\n self.log.info(\"Finished restoring backup after merging\")", "def _AddSnapshot(self, snapshot):\n if self._history.count(snapshot) == 0:\n self._history.append(snapshot)", "def test_merge_sum(self):\n ars = self.ar[2009][11]['general']\n ars2 = awstats_reader.AwstatsReader(test_file_dir,\n 'joshuakugler.com')[2009][11]['general']\n self.assertEqual(ars.merge(ars2, 'LastUpdate', 'parsed'), 1262637)", "def aggregate_statistics(self, new_stats):\n \n if isinstance(new_stats,RunStatistics):\n new_stats = [new_stats, ]\n elif isinstance(new_stats,list):\n if any(not isinstance(_,RunStatistics) for _ in new_stats):\n raise MadGraph5Error, \"The 'new_stats' argument of the function \"+\\\n \"'updtate_statistics' must be a (possibly list of) \"+\\\n \"RunStatistics instance.\"\n \n keys = set([])\n for stat in [self,]+new_stats:\n keys |= set(stat.keys())\n\n new_stats = new_stats+[self,]\n for key in keys:\n # Define special rules\n if key=='max_precision':\n # The minimal precision corresponds to the maximal value for PREC\n self[key] = min( _[key] for _ in new_stats if key in _)\n elif key=='min_precision':\n # The maximal precision corresponds to the minimal value for PREC\n 
self[key] = max( _[key] for _ in new_stats if key in _)\n elif key=='averaged_timing':\n n_madloop_calls = sum(_['n_madloop_calls'] for _ in new_stats if\n 'n_madloop_calls' in _)\n if n_madloop_calls > 0 :\n self[key] = sum(_[key]*_['n_madloop_calls'] for _ in \n new_stats if (key in _ and 'n_madloop_calls' in _) )/n_madloop_calls\n else:\n # Now assume all other quantities are cumulative\n self[key] = sum(_[key] for _ in new_stats if key in _)", "def add_snapshot(self):\n\n\t\tself.mu_values = self.cvt_handler.mu_values\n\t\tdim_mu = self.mu_values.shape[1]\n\t\taux_snapshot = self.file_handler.parse(self.namefile_prefix + str(dim_mu-1) + self.file_format, self.output_name)\n\t\tsnapshot = aux_snapshot.reshape(aux_snapshot.shape[0],1)\n\t\tself.snapshots = np.append(self.snapshots, snapshot, 1)\n\t\t\n\t\tself.print_info()", "def snapshot(self, snapshot):\n self._context[\"snapshot\"] = snapshot", "def snapshot(self, agent_memory):\n\n read_cmd = \"SELECT \"\n for r in self.TABLE_COLUMNS:\n read_cmd += r + \", \"\n read_cmd = read_cmd.strip(\", \")\n read_cmd += \" FROM \" + self.TABLE + \" WHERE uuid=?\"\n data = agent_memory._db_read_one(read_cmd, self.memid)\n if not data:\n raise (\"tried to snapshot nonexistent memory\")\n\n archive_memid = self.new(agent_memory, snapshot=True)\n new_data = list(data)\n new_data[0] = archive_memid\n\n if hasattr(self, \"ARCHIVE_TABLE\"):\n archive_table = self.ARCHIVE_TABLE\n else:\n archive_table = self.TABLE\n write_cmd = \"INSERT INTO \" + archive_table + \"(\"\n qs = \"\"\n for r in self.TABLE_COLUMNS:\n write_cmd += r + \", \"\n qs += \"?, \"\n write_cmd = write_cmd.strip(\", \")\n write_cmd += \") VALUES (\" + qs.strip(\", \") + \")\"\n agent_memory._db_write(write_cmd, *new_data)\n link_archive_to_mem(agent_memory, self.memid, archive_memid)", "def add_snapshot(self, dest, source=None):\n raise NotImplementedYet()", "def RestoreSnapshot(\r\n self,\r\n snapshot_data: Any,\r\n ):\r\n\r\n (\r\n self_id,\r\n iter,\r\n num_results,\r\n ignore_whitespace_ctr,\r\n ) = snapshot_data\r\n\r\n assert self_id == id(self)\r\n assert iter.Offset <= self.normalized_iter.Offset\r\n assert num_results <= len(self.results)\r\n assert ignore_whitespace_ctr <= self._ignore_whitespace_ctr\r\n\r\n self.normalized_iter = iter\r\n self._ignore_whitespace_ctr = ignore_whitespace_ctr\r\n\r\n if len(self.results) != num_results:\r\n del self.results[num_results - len(self.results):]", "def test_merge_aggregate_traditional(self):\n mdict = copy.deepcopy(self.dict1)\n mdict[\"A\"] = \"b\"\n ret = dictupdate.merge_overwrite(copy.deepcopy(self.dict1), {\"A\": \"b\"})\n self.assertEqual(mdict, ret)", "def restore(self, snapshot):\n self.unit_name = snapshot[\"unit_name\"]", "def take_snapshot(self):\r\n self.snapshot = self.name, self.size, copy.copy(self.cells)\r\n self.bucket_array.take_snapshot()", "def perform_snapshot(context, region, installed_region='us-east-1'):\n LOG.info('Reviewing snapshots in region %s', region)\n\n # fetch these, in case we need to figure out what applies to an instance\n configurations = dynamo.list_configurations(context, installed_region)\n LOG.debug('Fetched all possible configuration rules from DynamoDB')\n\n # build a list of any IDs (anywhere) that we should ignore\n ignore_ids = utils.build_ignore_list(configurations)\n\n # setup some lookup tables\n cache_data = utils.build_cache_maps(context, configurations, region, installed_region)\n all_instances = cache_data['instance_id_to_data']\n instance_configs = 
cache_data['instance_id_to_config']\n volume_snap_recent = cache_data['volume_id_to_most_recent_snapshot_date']\n\n for instance_id in set(all_instances.keys()):\n # before we go do some work\n if timeout_check(context, 'perform_snapshot'):\n break\n\n if instance_id in ignore_ids:\n continue\n\n snapshot_settings = instance_configs[instance_id]\n\n # parse out snapshot settings\n retention, frequency = utils.parse_snapshot_settings(snapshot_settings)\n\n # grab the data about this instance id, if we don't already have it\n instance_data = all_instances[instance_id]\n\n ami_id = instance_data['ImageId']\n LOG.info('Reviewing snapshots in region %s on instance %s', region, instance_id)\n\n for dev in instance_data.get('BlockDeviceMappings', []):\n # before we go make a bunch more API calls\n if timeout_check(context, 'perform_snapshot'):\n break\n\n # we probably should have been using volume keys from one of the\n # caches here, but since we're not, we're going to have to check here too\n LOG.debug('Considering device %s', dev)\n volume_id = dev['Ebs']['VolumeId']\n\n if volume_id in ignore_ids:\n continue\n\n # find snapshots\n recent = volume_snap_recent.get(volume_id)\n now = datetime.datetime.now(dateutil.tz.tzutc())\n\n # snapshot due?\n if should_perform_snapshot(frequency, now, volume_id, recent):\n LOG.debug('Performing snapshot for %s, calculating tags', volume_id)\n else:\n LOG.debug('NOT Performing snapshot for %s', volume_id)\n continue\n\n # perform actual snapshot and create tag: retention + now() as a Y-M-D\n delete_on_dt = now + retention\n delete_on = delete_on_dt.strftime('%Y-%m-%d')\n\n volume_data = utils.get_volume(volume_id, region=region)\n expected_tags = utils.calculate_relevant_tags(\n instance_data.get('Tags', None),\n volume_data.get('Tags', None))\n\n utils.snapshot_and_tag(\n instance_id,\n ami_id,\n volume_id,\n delete_on,\n region,\n additional_tags=expected_tags)", "def merge(coverage_frame, ck_frame='metrics/ck.csv', halstead_frame='metrics/halstead.csv',\n jdepend_frame='metrics/jdepend.csv',\n keywords_frame='metrics/keywords.csv'):\n ck = pd.read_csv(ck_frame)\n halstead = pd.read_csv(halstead_frame)\n jdepend = pd.read_csv(jdepend_frame)\n keywords = pd.read_csv(keywords_frame)\n\n coverage_col = 'TARGET_CLASS' if 'TARGET_CLASS' in coverage_frame.columns else 'class'\n\n merge_frame = coverage_frame.merge(ck, left_on=coverage_col, right_on='class')\n merge_frame = merge_frame.merge(halstead, on='class')\n merge_frame = merge_frame.merge(jdepend, on='class')\n merge_frame = merge_frame.merge(keywords, left_on='class', right_on='class-name')\n return merge_frame", "def merge(*args):\n return _libsbml.Unit_merge(*args)", "def update_snapshot_data(self, sSnapshotUuid, sNewName, sNewDescription = ''):\n\t\treturn Job(SDK.PrlVm_UpdateSnapshotData(self.handle, sSnapshotUuid, sNewName, sNewDescription)[0])", "def _aggregate_traj_stats(traj_stats_a, traj_stats_b):\n merged_stats = {}\n for k in traj_stats_a:\n n_a, avg_a, M2_a = traj_stats_a[k][\"n\"], traj_stats_a[k][\"mean\"], traj_stats_a[k][\"sqdiff\"]\n n_b, avg_b, M2_b = traj_stats_b[k][\"n\"], traj_stats_b[k][\"mean\"], traj_stats_b[k][\"sqdiff\"]\n n = n_a + n_b\n mean = (n_a * avg_a + n_b * avg_b) / n\n delta = (avg_b - avg_a)\n M2 = M2_a + M2_b + (delta ** 2) * (n_a * n_b) / n\n merged_stats[k] = dict(n=n, mean=mean, sqdiff=M2)\n return merged_stats", "def merge_time_metric(self, metric):\n\n self.merge_raw_time_metric(metric.duration, metric.exclusive)", "def deserialize_snapshot(self, 
serialized_snapshot):\n snapshot = list(serializers.deserialize(\n 'python', [serialized_snapshot]\n ))[0].object\n snapshot.__version__ = serialized_snapshot['version']\n snapshot.__extra_fields__ = serialized_snapshot['extra_fields']\n # override extra fields\n for name, value in serialized_snapshot['extra_fields'].items():\n if value:\n if isinstance(value, dict):\n value = self.deserialize_snapshot(value)\n setattr(snapshot, name, value)\n return snapshot", "def do_takesnapshot(self, str_arg):\n img = None\n fname = validateString(str_arg)\n try:\n # self.adbc.wake()\n printLog(self.threadName + 'taking snapshot (0,50,%d,%d) ...' %\n (self.scn_width, self.scn_height))\n img = self.adbc.takeSnapshot(reconnect=True)\n # PIL code\n img = img.crop((0, 50, self.scn_width, self.scn_height))\n img.save(fname, SNAPSHOT_IMAGE_FORMAT)\n # if self.scn_width>SNAPSHOT_WIDTH:\n # self.compressImage(fname)\n # os.remove(fname)\n # im.save(fname)\n printLog(self.threadName + 'snapshot saved as %s' % fname)\n except EnvironmentError:\n self.resultFlag = False\n if DEBUG:\n traceback.print_exc()\n finally:\n img = None", "def scrub_snapshot(snapshot_id: int):\n span = opentracing.tracer.start_span('tasks.scrub_snapshot')\n span.set_tag('snapshot_id', snapshot_id)\n _scrub_snapshot(snapshot_id, span)\n span.finish()\n # Flush the loggers here so it's not in the span\n utils.flush_logstash()", "def test_backup_merge_with_unmerged(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self._take_n_backups(n=2)\n self.backupset.start = 1\n self.backupset.end = 2\n self.log.info(\"Merging existing incremental backups\")\n status, output, message = self.backup_merge()\n if not status:\n self.fail(message)\n self.log.info(\"Taking more backups\")\n self._take_n_backups(n=2)\n self.backupset.start = 1\n self.backupset.end = 3\n self.log.info(\"Merging new backups into already merged backup\")\n status, output, message = self.backup_merge()\n if not status:\n self.fail(message)\n self.log.info(\"Successfully merged new backups with already merged backup\")", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.gain = self.snapshot[0]\r\n self.block = self.snapshot[1]\r\n self.locked = self.snapshot[2]\r\n self.bucket_num = self.snapshot[3]", "def __push_aggregation(self, table, sub_table, table_id, sub_table_id):\n table_entries = self.__postgre_db.get_data_from_table(table)\n for entry in table_entries:\n aggregation = 0\n entry_id = entry[table_id]\n entries_to_look_up = entry[sub_table_id]\n\n for look_up in entries_to_look_up:\n # calcutate aggregations differently depending on how the table structure is\n if len(entries_to_look_up) > 1:\n stored_value = self.__postgre_db.get(sub_table, sub_table_id + \"=\" + str(look_up), \"aggregation\")\n if stored_value is None:\n stored_value = 0\n aggregation += stored_value\n\n else:\n query = \"SELECT SUM(aggregation) FROM \" + sub_table + \" WHERE \" + sub_table_id + \"=\" + str(look_up)\n aggregation = self.__postgre_db.query(query)[0]['sum']\n if aggregation is None:\n aggregation = 0\n\n self.__postgre_db.update(table, \"aggregation=\" + str(aggregation), table_id + \"=\" + str(entry_id))", "def restore_from_snapshot(SnapshotId=None):\n pass", "def get_health_data_and_ingest_into_sentinel(self):\n self.pull_and_push_the_snapshot_data(\n HEALTH_ENDPOINT, self.health_table_name, fields=MODIFIED_FIELDS\n )", "def 
servicemanage_glance_metadata_copy_to_servicemanage(context, servicemanage_id, snapshot_id,\n session=None):\n if session is None:\n session = get_session()\n\n metadata = servicemanage_snapshot_glance_metadata_get(context, snapshot_id,\n session=session)\n with session.begin():\n for meta in metadata:\n vol_glance_metadata = models.ServiceManageGlanceMetadata()\n vol_glance_metadata.servicemanage_id = servicemanage_id\n vol_glance_metadata.key = meta['key']\n vol_glance_metadata.value = meta['value']\n\n vol_glance_metadata.save(session=session)", "def get_experiment_metric_snapshot_v1(self, skill_id, experiment_id, metric_snapshot_id, **kwargs):\n # type: (str, str, str, **Any) -> Union[ApiResponse, object, GetExperimentMetricSnapshotResponse_b6905a35, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_experiment_metric_snapshot_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'experiment_id' is set\n if ('experiment_id' not in params) or (params['experiment_id'] is None):\n raise ValueError(\n \"Missing the required parameter `experiment_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'metric_snapshot_id' is set\n if ('metric_snapshot_id' not in params) or (params['metric_snapshot_id'] is None):\n raise ValueError(\n \"Missing the required parameter `metric_snapshot_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/experiments/{experimentId}/metricSnapshots/{metricSnapshotId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'experiment_id' in params:\n path_params['experimentId'] = params['experiment_id']\n if 'metric_snapshot_id' in params:\n path_params['metricSnapshotId'] = params['metric_snapshot_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.experiment.get_experiment_metric_snapshot_response.GetExperimentMetricSnapshotResponse\", status_code=200, message=\"Returned experiment metric data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n 
error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.experiment.get_experiment_metric_snapshot_response.GetExperimentMetricSnapshotResponse\")\n\n if full_response:\n return api_response\n return api_response.body", "def __add__(self, other):\n merged_profile = super().__add__(other)\n\n # struct specific property merging\n merged_profile.row_has_null_count = \\\n self.row_has_null_count + other.row_has_null_count\n merged_profile.row_is_null_count = \\\n self.row_is_null_count + other.row_is_null_count\n merged_profile.hashed_row_dict.update(self.hashed_row_dict)\n merged_profile.hashed_row_dict.update(other.hashed_row_dict)\n\n self_to_other_idx = self._get_and_validate_schema_mapping(self._col_name_to_idx,\n other._col_name_to_idx)\n\n # merge profiles\n for idx in range(len(self._profile)):\n other_idx = self_to_other_idx[idx]\n merged_profile._profile.append(self._profile[idx] +\n other._profile[other_idx])\n\n # schemas are asserted to be identical\n merged_profile._col_name_to_idx = copy.deepcopy(self._col_name_to_idx)\n\n # merge correlation\n if (self.options.correlation.is_enabled\n and other.options.correlation.is_enabled):\n merged_profile.correlation_matrix = self._merge_correlation(other)\n\n # recompute chi2 if needed\n if self.options.chi2_homogeneity.is_enabled and \\\n other.options.chi2_homogeneity.is_enabled:\n\n chi2_mat1 = self.chi2_matrix\n chi2_mat2 = other.chi2_matrix\n n1 = self.total_samples - self.row_is_null_count\n n2 = other.total_samples - other.row_is_null_count\n if n1 == 0:\n merged_profile.chi2_matrix = chi2_mat2\n elif n2 == 0:\n merged_profile.chi2_matrix = chi2_mat1\n elif chi2_mat1 is None or chi2_mat2 is None:\n merged_profile.chi2_matrix = None\n else:\n merged_profile.chi2_matrix = merged_profile._update_chi2()\n\n return merged_profile", "def take_snapshot():\n df = scrape()\n for i in df.index:\n single = df.loc[i]\n # create or get locations\n loc, created = Location.objects.get_or_create(\n name=single['Location'],\n all_stands=single['Stands'],\n coordinates=single['Coords']\n )\n # add a new snapshot\n obj = Snapshot(\n location=loc,\n avail_bikes=single['Bikes'],\n free_stands=single['Free stands'],\n timestamp=datetime.now(tz=timezone('Europe/Warsaw'))\n )\n obj.save()", "def 
aggregate(all_metrics, reducer, suffix):\n # Collect metric separately\n separated_metrics = {} # type: dict[frozenset, list[dict]]\n for el in all_metrics:\n key = frozenset(el[\"metric\"][\"dimensions\"].items())\n if key not in separated_metrics:\n separated_metrics[key] = [el]\n else:\n separated_metrics[key].append(el)\n\n # Collect all dimensions\n dims = {}\n for metric_dims in separated_metrics.keys():\n for prop, val in dict(metric_dims).iteritems():\n if prop in dims:\n dims[prop].add(val)\n else:\n dims[prop] = set(val)\n\n # Sort each metric\n for _, metric in separated_metrics.iteritems():\n metric.sort(key=lambda v: v[\"metric\"][\"timestamp\"])\n\n separated_metrics = sorted(separated_metrics.values(), key=len)\n separated_metrics.reverse()\n\n # Compute the new values\n new_values = []\n all_timestamps = map(\n lambda l: map(\n lambda x: x[\"metric\"][\"timestamp\"], l),\n separated_metrics)\n metric_count = len(separated_metrics)\n for index in range(0, len(separated_metrics[0])):\n new_value = reducer[0](\n separated_metrics[0][index][\"metric\"][\"value\"],\n metric_count)\n new_timestamp = separated_metrics[0][index][\"metric\"][\"timestamp\"]\n for metric_index in range(1, metric_count):\n new_value = reducer[1](new_value, helpers.interpolate(\n new_timestamp,\n separated_metrics[metric_index],\n all_timestamps[metric_index]\n ), metric_count)\n new_values.append((new_timestamp, new_value))\n\n # Aggregate the other details:\n metric_name = separated_metrics[0][0][\"metric\"][\"name\"] + suffix\n meta = separated_metrics[0][0][\"meta\"]\n new_metrics = [\n helpers.create_agg_metric(\n metric_name,\n meta,\n dims,\n val[0],\n val[1]\n ) for val in new_values\n ]\n return new_metrics", "def merge_stats(self, other):\n\n self[0] += other[0]\n self[1] += other[1]\n self[2] += other[2]\n\n self[3] = ((self[0] or self[1] or self[2]) and\n min(self[3], other[3]) or other[3])\n self[4] = max(self[4], other[3])", "def take_snapshot(self):\r\n self.snapshot = self.gain, self.block, self.locked, self.bucket_num", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.blockA = self.snapshot[0]\r\n self.blockB = self.snapshot[1]\r\n self.blockA_locked = self.snapshot[2]\r\n self.blockB_locked = self.snapshot[3]\r\n self.blockA_free = self.snapshot[4]\r\n self.blockB_free = self.snapshot[5]\r\n self.blockA_cells = self.snapshot[6]\r\n self.blockB_cells = self.snapshot[7]\r\n self.cut = self.snapshot[8]", "def merge_logs(self):\n ourlog = LogData()\n for l in self.data_set:\n ourlog.entries = ourlog.entries + l.entries\n ourlog.sort_time()\n self.finalized_data = ourlog", "def load_snapshot(self):\r\n assert self.snapshot is not None\r\n self.max_gain = self.snapshot[0]\r\n self.array = self.snapshot[1]\r\n self.free_cell_list = self.snapshot[2]", "def combine_dict(self, dict2):\n # iterate through smaller data set\n # base_set will be the larger set and is used for updating\n if len(self.content[\"values\"]) > len(dict2[\"values\"]):\n large_set = self.content[\"values\"]\n small_set = dict2[\"values\"]\n base_set = self.content\n else:\n small_set = self.content[\"values\"]\n large_set = dict2[\"values\"]\n base_set = dict2\n\n subset = {}\n for key in small_set.keys():\n # determine wether to compare keys\n if key in large_set:\n updated_l = large_set[key][\"updated_at\"]\n updated_s = small_set[key][\"updated_at\"]\n if updated_l == 'NULL':\n if updated_s != 'NULL':\n # update to not NULL set\n # if both updated_at are NULL, things\n # are ambiguos. 
We could defer to created_at\n # but for simplicity we will default to\n # the values in the larger set\n subset[key] = small_set[key]\n else:\n if updated_s == 'NULL':\n # update to not NULL set\n subset[key] = large_set[key]\n else:\n if updated_l > updated_s:\n subset[key] = large_set[key]\n else:\n subset[key] =small_set[key]\n else:\n subset[key] = small_set[key]\n base_set[\"values\"].update(subset)\n new_obj = BackupData()\n new_obj.load_from_dict(base_set)\n return new_obj", "def create_snapshot(store, dataset, snapshot, description_fields, snapshot_changes):\n validate_snapshot_name(store, dataset, snapshot)\n validate_datalad_config(store, dataset)\n update_description(store, dataset, description_fields)\n update_changes(store, dataset, snapshot, snapshot_changes)\n save_snapshot(store, dataset, snapshot)\n return get_snapshot(store, dataset, snapshot)", "def create_snapshot(self, snapshot):\n vg_name = self.get_volume_group_name(snapshot.volume_id)\n snap_name = self.get_snap_name(snapshot.id)\n rpolicy = self.get_policy()\n try:\n LOG.debug(\"Searching volume_group: %s in K2.\", vg_name)\n vg = self.client.search(\"volume_groups\", name=vg_name).hits[0]\n LOG.debug(\"Creating a snapshot: %(snap)s from vg: %(vg)s\",\n {'snap': snap_name, 'vg': vg_name})\n self.client.new(\"snapshots\", short_name=snap_name,\n source=vg, retention_policy=rpolicy,\n is_auto_deleteable=False).save()\n except Exception as ex:\n LOG.exception(\"Creation of snapshot: %s failed.\", snap_name)\n raise KaminarioCinderDriverException(reason=ex)", "def merge_accumulators(self, accumulators):\n raise NotImplementedError", "def test_merge_backup_with_multi_threads(self):\n gen = BlobGenerator(\"ent-backup1\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.log.info(\"Start doing backup\")\n self.backup_create()\n self.backup_cluster()\n gen = BlobGenerator(\"ent-backup2\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_cluster(self.threads_count)\n self.backupset.number_of_backups += 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n self.log.info(\"Start to merge backup\")\n self.backupset.start = randrange(1, self.backupset.number_of_backups)\n if int(self.backupset.number_of_backups) == 2:\n self.backupset.end = 2\n elif int(self.backupset.number_of_backups) > 2:\n self.backupset.end = randrange(self.backupset.start,\n self.backupset.number_of_backups + 1)\n self.merged = True\n status, output, _ = self.backup_merge()\n self.backupset.end -= 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n current_vseqno = self.get_vbucket_seqnos(self.cluster_to_backup, self.buckets,\n self.skip_consistency, self.per_node)\n self.log.info(\"*** Start to validate data in merge backup \")\n self.validate_backup_data(self.backupset.backup_host, [self.master],\n \"ent-backup\", False, False, \"memory\",\n self.num_items, None)\n self.backup_cluster_validate(skip_backup=True)", "def update(self, group_snapshot, **kwargs):\n if not kwargs:\n return\n\n body = {\"group_snapshot\": kwargs}\n\n return self._update(\"/group_snapshots/%s\" % base.getid(group_snapshot),\n body)", "def format_snapshot(self, snapshot, user_id, data_path):\n snapshot_obj = Snapshot.FromString(snapshot)\n json_snapshot = {\n \"datetime\": self._format_datetime(snapshot_obj.datetime),\n \"depth_image\": 
self._depth_image_handler(snapshot_obj, user_id,\n data_path),\n \"color_image\": self._color_image_handler(snapshot_obj, user_id,\n data_path),\n \"feelings\": self._feeling_handler(snapshot_obj),\n \"pose\": self._pose_handler(snapshot_obj),\n }\n return json_snapshot", "def _merge(self, other: dict):\n self._storage = dict_merge(self._storage, other)", "def test_merge_backup_from_old_and_new_bucket(self):\n gen = BlobGenerator(\"ent-backup1_\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.log.info(\"Start doing backup\")\n self.backup_create()\n self.backup_cluster()\n if self.bucket_delete:\n self.log.info(\"Start to delete bucket\")\n BucketOperationHelper.delete_all_buckets_or_assert([self.master], self)\n BucketOperationHelper.create_bucket(serverInfo=self.master, test_case=self)\n elif self.bucket_flush:\n self.log.info(\"Start to flush bucket\")\n self._all_buckets_flush()\n gen = BlobGenerator(\"ent-backup2_\", \"ent-backup-\", self.value_size, end=self.num_items)\n self.log.info(\"Start to load bucket again with different key\")\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_cluster()\n self.backupset.number_of_backups += 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n self.log.info(\"Start to merge backup\")\n self.backupset.start = randrange(1, self.backupset.number_of_backups)\n self.backupset.end = self.backupset.number_of_backups\n self.merged = True\n result, output, _ = self.backup_merge()\n self.backupset.end -= 1\n status, output, message = self.backup_list()\n if not status:\n self.fail(message)\n current_vseqno = self.get_vbucket_seqnos(self.cluster_to_backup, self.buckets,\n self.skip_consistency, self.per_node)\n self.log.info(\"*** Start to validate data in merge backup \")\n self.validate_backup_data(self.backupset.backup_host, [self.master],\n \"ent-backup\", False, False, \"memory\",\n self.num_items, \"ent-backup1\")\n self.backup_cluster_validate(skip_backup=True)", "def __add__(self, other):\n if self.xml.find('mosromgrmeta') is None or isinstance(other, RunningOrderControl):\n return other.merge(self)\n raise MosCompletedMergeError(\"Cannot merge completed MOS file\")", "def take_snapshot(self):\r\n self.snapshot = self.blockA, self.blockB, self.blockA_locked, self.blockB_locked, self.blockA_free, \\\r\n self.blockB_free, copy.copy(self.blockA_cells), copy.copy(self.blockB_cells), self.cut", "def database_volume_snapshot_add(volume_snapshot_obj):\n db = database_get()\n session = db.session()\n query = session.query(model.VolumeSnapshot)\n query = query.filter(model.VolumeSnapshot.uuid == volume_snapshot_obj.uuid)\n volume_snapshot = query.first()\n if not volume_snapshot:\n volume_snapshot = model.VolumeSnapshot()\n volume_snapshot.uuid = volume_snapshot_obj.uuid\n volume_snapshot.name = volume_snapshot_obj.name\n volume_snapshot.description = volume_snapshot_obj.description\n volume_snapshot.size_gb = volume_snapshot_obj.size_gb\n volume_snapshot.volume_uuid = volume_snapshot_obj.volume_uuid\n volume_snapshot.nfvi_volume_snapshot_data = \\\n json.dumps(volume_snapshot_obj.nfvi_volume_snapshot.as_dict())\n session.add(volume_snapshot)\n else:\n volume_snapshot.name = volume_snapshot_obj.name\n volume_snapshot.description = volume_snapshot_obj.description\n volume_snapshot.size_gb = volume_snapshot_obj.size_gb\n volume_snapshot.volume_uuid = volume_snapshot_obj.volume_uuid\n volume_snapshot.nfvi_volume_snapshot_data = 
\\\n json.dumps(volume_snapshot_obj.nfvi_volume_snapshot.as_dict())\n db.commit()", "def merge_stats(self, other):\n\n self[1] += other[1]\n self[2] += other[2]\n self[3] = self[0] and min(self[3], other[3]) or other[3]\n self[4] = max(self[4], other[4])\n self[5] += other[5]\n\n # Must update the call count last as update of the\n # minimum call time is dependent on initial value.\n\n self[0] += other[0]", "def redo(self):\n if self._snapshot_index <= len(self._snapshots) - 2:\n snapshot = self._snapshots[self._snapshot_index + 1]\n for chunk_location in snapshot:\n dimension, cx, cz = chunk_location\n chunk = self._unserialise_chunk(dimension, cx, cz, 1)\n self._chunk_cache[chunk_location] = chunk\n self._snapshot_index += 1", "def snapshot(model, criterion, losses, epoch, snapshot_prefix,\n log, optimizer=None):\n\n snapshot_dir = op.dirname(snapshot_prefix)\n if not op.exists(snapshot_dir):\n os.makedirs(snapshot_dir)\n\n if op.basename(snapshot_prefix) != \"model\":\n snapshot_prefix = op.join(snapshot_dir, \"model\")\n\n snapshot_pt = get_snapshot(snapshot_prefix, epoch)\n snapshot_losses_pt = snapshot_prefix + \"_losses.pt\"\n\n state = {\n 'epochs': epoch,\n 'state_dict': model.state_dict(),\n 'seen_images': criterion.seen_images,\n 'region_target.biases': criterion.criterion.region_target.biases,\n 'region_target.seen_images': criterion.criterion.seen_images\n }\n\n if optimizer:\n state.update({\n 'optimizer': optimizer.state_dict(),\n })\n\n log.verbose(\"Snapshotting to: {}\".format(snapshot_pt))\n torch.save(state, snapshot_pt)\n torch.save(losses, snapshot_losses_pt)", "def merge(self, other: PerfData):\n self.total_samples += other.total_samples\n if self.total_time == 0.0:\n self.total_time = other.total_time\n self.compile_time = max(self.compile_time, other.compile_time)\n self.programming_time = max(\n self.programming_time, other.programming_time\n )\n if self.est_samples_per_sec == 0.0:\n self.est_samples_per_sec = other.est_samples_per_sec\n else:\n assert (\n self.est_samples_per_sec == other.est_samples_per_sec\n ), \"Expected all fabric-based performance estimates to be identical\"\n\n if self.total_time > 0:\n self.samples_per_sec = float(self.total_samples) / self.total_time\n else:\n self.samples_per_sec = 0.0", "def process_and_write_aggregate_results(\n aggregate_metrics: List[Dict],\n aggregate_stats: List[Dict],\n configuration: Dict,\n args: argparse.Namespace,\n dataset_id: str,\n) -> None:\n (\n averaged_metrics,\n averaged_stats,\n ) = fanatic.metrics.average_metrics_stats_from_seed_runs(aggregate_metrics, aggregate_stats)\n\n fanatic.output.save_averaged_results(averaged_metrics, averaged_stats, configuration, args, dataset_id)\n\n final_metric = averaged_metrics[\"ami\"][\"mean\"]\n logger.info(f\"For dataset_id={dataset_id} final averaged ami metric={final_metric}\")", "def update(snapshot_data: Dict[str, Any], span: Span) -> bool:\n snapshot_id = snapshot_data['id']\n # Generate the necessary template data\n child_span = opentracing.tracer.start_span('generate_template_data', child_of=span)\n template_data = Linux._get_template_data(snapshot_data, child_span)\n child_span.finish()\n\n # Check that the data was successfully generated\n if template_data is None:\n error = f'Failed to retrieve template data for Snapshot #{snapshot_id}'\n Linux.logger.error(error)\n snapshot_data['errors'].append(error)\n span.set_tag('failed_reason', 'template_data_failed')\n return False\n\n # Check that all the necessary keys are present\n if not 
all(template_data[key] is not None for key in Linux.template_keys):\n missing_keys = [f'\"{key}\"' for key in Linux.template_keys if template_data[key] is None]\n error_msg = f'Template Data Error, the following keys were missing from the Snapshot update data: ' \\\n f'{\", \".join(missing_keys)}.'\n Linux.logger.error(error_msg)\n span.set_tag('failed_reason', 'template_data_keys_missing')\n return False\n\n # If everything is okay, commence updating the snapshot\n host_ip = template_data.pop('host_ip')\n # Generate the update command using the template data\n child_span = opentracing.tracer.start_span('generate_command', child_of=span)\n cmd = utils.JINJA_ENV.get_template('snapshot/kvm/commands/update.j2').render(**template_data)\n child_span.finish()\n\n Linux.logger.debug(f'Generated Snapshot Update command for Snapshot #{snapshot_id}\\n{cmd}')\n\n # Open a client and run the necessary command on the host\n updated = False\n client = SSHClient()\n client.set_missing_host_key_policy(AutoAddPolicy())\n key = RSAKey.from_private_key_file('/root/.ssh/id_rsa')\n sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)\n try:\n # Try connecting to the host and running the necessary commands\n sock.connect((host_ip, 22))\n client.connect(\n hostname=host_ip,\n username='administrator',\n pkey=key,\n timeout=30,\n sock=sock,\n ) # No need for password as it should have keys\n span.set_tag('host', host_ip)\n\n # Attempt to execute the update command\n Linux.logger.debug(f'Executing update command for Snapshot #{snapshot_id}')\n\n child_span = opentracing.tracer.start_span('update_snapshot', child_of=span)\n stdout, stderr = Linux.deploy(cmd, client, child_span)\n child_span.finish()\n\n if stdout:\n Linux.logger.debug(f'Snapshot Update command for Snapshot #{snapshot_id} generated stdout. \\n{stdout}')\n updated = True\n if stderr:\n Linux.logger.error(f'Snapshot update command for Snapshot #{snapshot_id} generated stderr. 
\\n{stderr}')\n except (OSError, SSHException) as err:\n error = f'Exception occurred while updating Snapshot #{snapshot_id} in {host_ip}.'\n Linux.logger.error(error, exc_info=True)\n snapshot_data['errors'].append(f'{error} Error: {err}')\n span.set_tag('failed_reason', 'ssh_error')\n finally:\n client.close()\n return updated", "def test_update_risk_snapshot(self):\n with factories.single_commit():\n program = factories.ProgramFactory(title=\"P1\")\n risk = factories.RiskFactory(title=\"R1\")\n risk_id = risk.id\n factories.RelationshipFactory(source=program, destination=risk)\n # Risk snapshot created for audit during mapping audit to program\n self.objgen.generate_object(all_models.Audit, {\n \"title\": \"A1\",\n \"program\": {\"id\": program.id},\n \"status\": \"Planned\",\n \"snapshots\": {\n \"operation\": \"create\",\n },\n })\n # Update risk to get outdated snapshot (new risk revision)\n risk = all_models.Risk.query.get(risk_id)\n self.api.put(risk, risk.id, {\n \"title\": \"New risk title\",\n })\n audit = all_models.Audit.query.filter_by(title=\"A1\").one()\n snapshot = all_models.Snapshot.query.first()\n self.assertEquals(audit, snapshot.parent)\n\n # Update snapshot to the latest revision\n response = self.api.put(snapshot, snapshot.id, {\n \"update_revision\": \"latest\",\n })\n\n self.assert200(response)\n self.assertTrue(response.json[\"snapshot\"][\"is_latest_revision\"])", "def update(self, snapshot, **kwargs):\n if not kwargs:\n return\n\n body = {\"snapshot\": kwargs}\n\n return self._update(\"/snapshots/%s\" % base.getid(snapshot), body)", "def get_snapshot_object(session, key, snapshot=None):\n # type: (Session, Text, Optional[Text]) -> Any\n url_tail = \"/{}/{}/{}/{}/{}\".format(\n CoordConstsV2.RSC_NETWORKS,\n session.network,\n CoordConstsV2.RSC_SNAPSHOTS,\n session.get_snapshot(snapshot),\n CoordConstsV2.RSC_OBJECTS,\n )\n return _get_stream(session, url_tail, {CoordConstsV2.QP_KEY: key})", "def update(start_date, end_date, binning):\n global mean_source1,mean_source,median_source1,median_source, difference_source1,difference_source, difference_source2 , difference_source3, mean_source2, median_source2\n\n # if type of start/end date is date, turn it into a datetime,\n # set time of start/end date time to 12:00\n\n def convert_time(t):\n if type(t) == datetime.date:\n return datetime.datetime(t.year, t.month, t.day, 12, 0, 0, 0)\n else:\n return t.replace(hour=12, minute=0, second=0, microsecond=0)\n\n start_date = convert_time(start_date)\n end_date = convert_time(end_date)\n if binning is None:\n binning = ''\n\n first_timestamp = start_date.timestamp() + time_offset\n second_timestamp = end_date.timestamp() + time_offset\n\n # query data in mysql database\n sql1 = 'SELECT str_to_date(datetime,\"%%Y-%%m-%%d %%H:%%i:%%s\") AS datetime, seeing from seeing ' \\\n ' where datetime >= str_to_date(\"{start_date_}\",\"%%Y-%%m-%%d %%H:%%i:%%s\")' \\\n ' and datetime <= str_to_date(\"{end_date_}\",\"%%Y-%%m-%%d %%H:%%i:%%s\") ' \\\n .format(start_date_=str(start_date), end_date_=str(end_date))\n\n sql2 = 'select _timestamp_,ee50,fwhm,timestamp from tpc_guidance_status__timestamp where timestamp >= {start_date_}' \\\n ' and timestamp<= {end_date_} and guidance_available=\"T\" ' \\\n ' order by _timestamp_' \\\n .format(start_date_=str(first_timestamp), end_date_=str(second_timestamp))\n\n df2 = pd.read_sql(sql2, db.get_engine(app=current_app, bind='els'))\n df1 = pd.read_sql(sql1, db.get_engine(app=current_app, bind='suthweather'))\n\n # setting index time for 
calculating mean and average\n df2.index = df2[\"_timestamp_\"]\n df1.index = df1['datetime']\n\n # It seems that Pandas doesn't change the index type if the data frame is empty, which means that resampling\n # would fail for an empty data frame. As there will be no row for median or mean , it is safe to just use the\n # original data frame to avoid this problem.\n\n # for external seeing calculating median and mean\n if not df1.empty:\n mean1_all = df1.resample(str(binning) + 'T').mean()\n else:\n mean1_all = df1.copy(deep=True)\n source1 = ColumnDataSource(mean1_all)\n mean_source1.data = source1.data\n\n if not df1.empty:\n median1_all = df1.resample(str(binning) + 'T').median()\n else:\n median1_all = df1.copy(deep=True)\n source = ColumnDataSource(median1_all)\n median_source1.data = source.data\n\n # calculate mean and median for ee50\n if not df2.empty:\n mean_all = df2.resample(str(binning) + 'T').mean()\n else:\n mean_all = df2.copy(deep=True)\n source3 = ColumnDataSource(mean_all)\n mean_source.data = source3.data\n\n if not df2.empty:\n median_all = df2.resample(str(binning) + 'T').median()\n else:\n median_all = df2.copy(deep=True)\n source4 = ColumnDataSource(median_all)\n median_source.data = source4.data\n\n #calculate mean and median for fwhm\n if not df2.empty:\n mean_all1 = df2.resample(str(binning) + 'T').mean()\n else:\n mean_all1 = df2.copy(deep=True)\n source4 = ColumnDataSource(mean_all)\n mean_source2.data = source4.data\n\n if not df2.empty:\n median_all = df2.resample(str(binning) + 'T').median()\n else:\n median_all = df2.copy(deep=True)\n source5 = ColumnDataSource(median_all)\n median_source2.data = source5.data\n\n # calculate difference for external seeing against fwhm and ee50\n dataframes = [mean1_all, mean_all]\n add_dataframes = pd.concat(dataframes, axis=1)\n add_dataframes.index.name = '_timestamp_'\n add_dataframes['difference'] = add_dataframes['seeing'] - add_dataframes['ee50']\n datasource2 = ColumnDataSource(add_dataframes)\n difference_source.data = datasource2.data\n\n dataframes = [mean1_all, mean_all1]\n add_dataframes = pd.concat(dataframes, axis=1)\n add_dataframes.index.name = '_timestamp_'\n add_dataframes['difference1'] = add_dataframes['seeing'] - add_dataframes['fwhm']\n datasource1 = ColumnDataSource(add_dataframes)\n difference_source1.data = datasource1.data\n\n # #difference using the median\n # dataframes2 = [median_all, median1_all]\n # add_dataframes2 = pd.concat(dataframes2, axis=1)\n # add_dataframes2.index.name = '_timestamp_'\n # add_dataframes2['difference2'] = add_dataframes2['seeing'] - add_dataframes2['ee50']\n # datasource2 = ColumnDataSource(add_dataframes2)\n # difference_source2.data = datasource2.data\n #\n # dataframes3 = [median_all, median1_all]\n # add_dataframes3 = pd.concat(dataframes3, axis=1)\n # add_dataframes3.index.name = '_timestamp_'\n # add_dataframes3['difference3'] = add_dataframes3['seeing'] - add_dataframes3['fwhm']\n # datasource3 = ColumnDataSource(add_dataframes3)\n # difference_source3.data = datasource3.data\n\n # plot labels\n p = figure(title=\"external vs internal seeing ({binning} minute bins)\".format(binning=binning), x_axis_type='datetime'\n , x_axis_label='datetime', y_axis_label='seeing',plot_width=1000, plot_height=500,tools=TOOLS)\n dif=figure(title='difference between average internal and external seeing ({binning} minute bins)'.format(binning=binning), x_axis_type='datetime',\n x_axis_label='datetime', y_axis_label='seeing',plot_width=1000, plot_height=500,tools=TOOLS)\n\n #plots\n 
# plots for external seeing\n p.circle(source=mean_source1, x='datetime',y='seeing', legend=\"external average\" ,fill_color=\"white\",color='green')\n p.line(source=median_source1, x='datetime',y='seeing', legend=\"external median\" ,color='blue')\n\n #plots showing median and mean for ee50 and fwhm\n p.circle(source=mean_source, x='_timestamp_', y='ee50', legend='ee50 average')\n p.circle(source=mean_source, x='_timestamp_', y='fwhm', legend='fwhm average', color='red', fill_color='white')\n\n p.line(source=median_source, x='_timestamp_', y='ee50', legend='ee50 median', color='green')\n p.line(source=median_source, x='_timestamp_', y='fwhm', legend='fwhm median', color='orange')\n\n #for difference\n dif.circle(source=difference_source, x='_timestamp_', y='difference', legend='ee50_mean difference', color='red')\n dif.circle(source=difference_source1, x='_timestamp_', y='difference1', legend='fwhm_mean difference', fill_color='green')\n\n #\n # dif.circle(source=difference_source2, x='_timestamp_', y='difference2', legend='ee50_median difference', fill_color='blue')\n # dif.circle(source=difference_source3, x='_timestamp_', y='difference3', legend='fwhm_median difference', color='orange')\n\n p.xaxis.formatter = date_formatter\n p.legend.location = \"top_left\"\n p.legend.click_policy=\"hide\"\n\n dif.xaxis.formatter = date_formatter\n dif.legend.click_policy=\"hide\"\n\n script, div = components(p)\n content1 = '<div>{script}{div}</div>'.format(script=script, div=div)\n\n script, div = components(dif)\n content2 = '<div>{script}{div}</div>'.format(script=script, div=div)\n\n return '{cont} {cont2}'.format(cont=content1,cont2=content2)", "def merge_stats(self, other):\n\n self[1] += other[1]\n self[2] = self[0] and min(self[2], other[2]) or other[2]\n self[3] = max(self[3], other[3])\n\n if self[3] == other[3]:\n self[4] = other[4]\n\n # Must update the call count last as update of the\n # minimum call time is dependent on initial value.\n\n self[0] += other[0]", "def merge(self, other: \"GraphSet\") -> None:\n if other.name != self.name:\n raise UnmergableGraphSetsException(\n f\"Unable to merge graph with name {other.name} into {self.name}\"\n )\n if other.version != self.version:\n raise UnmergableGraphSetsException(\n f\"Unable to merge graph with version {other.version} into {self.version}\"\n )\n self.start_time = min(self.start_time, other.start_time)\n self.end_time = max(self.end_time, other.end_time)\n self.resources += other.resources\n self._resolve_duplicates()\n self.errors += other.errors\n self.stats.merge(other.stats)", "def on_merge(self, to_be_merged, merge_result, context):\n pass", "def snapshot(self, snapshot_id):\r\n return self.connection.create_dbsnapshot(snapshot_id, self.id)", "def copy(self, other):\n assert isinstance(other, Snapshot)\n self.imp = other.imp", "def aggregate_global_cache(self, global_tt_summary_cache):\n\n # Merge only statistics tensor, if it is any other tensor we simply,\n # concatenate them.\n agg_fn_map = self._parameters.get_signature_to_agg_fn_map()\n signature_idx_map = self._signature_types()\n aggregation_result = []\n for signature, idx in sorted(signature_idx_map.items(),\n key=operator.itemgetter(1)):\n if signature not in agg_fn_map:\n raise RuntimeError('No aggregation function is defined for '\n 'signature %s.' 
% signature)\n # The dimensions of the statistics tensor is\n # num_cores x num_traced_tensors x num_signatures\n # value[:,:,idx] will return the portion of the tensor related\n # to signature.\n signature_tensor = global_tt_summary_cache[:, :, idx]\n # Merge it along the first (core) axis.\n agg_fn = agg_fn_map[signature]\n agg_tensor = agg_fn(signature_tensor, axis=0)\n aggregation_result.append(agg_tensor)\n # Merge results corresponding to different signatures\n\n merged_signatures = array_ops_stack.stack(aggregation_result)\n # merged_signatures has dimensions\n # num_signatures x num_traced_tensors, transpose it so that it\n # will match with the original structure\n # num_traced_tensors x num_signatures.\n transposed_signatures = array_ops.transpose(merged_signatures)\n # Expand 1 more dimension so that it will match with the expected\n # structure num_cores x num_traced_tensors x num_signatures.\n return array_ops.expand_dims(transposed_signatures, axis=0)", "def aggregate(self, batch_outs, batch_start=None, batch_end=None):\n raise NotImplementedError('Must be implemented in subclasses.')", "def _aggregate_metrics(metrics, aggfunc, base):\n return base.Struct(**_UNCOMPRESSED_METRICS)(\n left_side_bearing=aggfunc(_m.left_side_bearing for _m in metrics),\n right_side_bearing=aggfunc(_m.right_side_bearing for _m in metrics),\n character_width=aggfunc(_m.character_width for _m in metrics),\n character_ascent=aggfunc(_m.character_ascent for _m in metrics),\n character_descent=aggfunc(_m.character_descent for _m in metrics),\n character_attributes=0,\n )", "def build_summary(self):\n for k, v in self.metrics.items():\n tf.summary.scalar(k, v)\n \n self.summary_op = tf.summary.merge_all()", "def merge_data(agg_cases, lk_info, geolocation_data):\n merged_df = pd.merge(agg_cases, lk_info, left_on='IdLandkreis', right_on = 'Key')\n merged_df[\"RelativFall\"] = merged_df[\"AnzahlFall\"] / merged_df[\"Bev Insgesamt\"]\n merged_df[\"RelativTodesfall\"] = merged_df[\"AnzahlTodesfall\"] / merged_df[\"Bev Insgesamt\"]\n merged_df = pd.merge(merged_df, geolocation_data, left_on=\"Key\", right_on=\"cca_2\")\n return merged_df", "def merge_metadata(self):\n\n # Load merge metadata if necessary\n if not self._merges:\n self._merges = VersionedProperty(self.url, opts[\"prop\"])\n self._merges.load(self)\n\n return self._merges", "def update_snapshot_metadata(self, snapshot_id, **kwargs):\n put_body = json.dumps(kwargs)\n url = \"snapshots/%s/metadata\" % snapshot_id\n resp, body = self.put(url, put_body)\n body = json.loads(body)\n self.validate_response(schema.update_snapshot_metadata, resp, body)\n return rest_client.ResponseBody(resp, body)", "def reset_group_snapshot_status(self, context, gsnapshot, status):\n\n context.authorize(gsnap_action_policy.RESET_STATUS,\n target_obj=gsnapshot)\n field = {'updated_at': timeutils.utcnow(),\n 'status': status}\n gsnapshot.update(field)\n gsnapshot.save()", "def create_share_from_snapshot(\r\n self,\r\n context,\r\n share,\r\n snapshot,\r\n share_server=None):\r\n LOG.debug(\"Create share from snapshot.\")\r\n raise NotImplementedError()", "def memory_snapshot(tag, rank):\n GB = 1024 * 1024 * 1024\n MB = 1024 * 1024\n KB = 1024\n\n peak = dgl.partition.get_peak_mem() * KB\n mem = psutil.virtual_memory()\n avail = mem.available / MB\n used = mem.used / MB\n total = mem.total / MB\n\n mem_string = f\"{total:.0f} (MB) total, {peak:.0f} (MB) peak, {used:.0f} (MB) used, {avail:.0f} (MB) avail\"\n logging.debug(f\"[Rank: {rank} MEMORY_SNAPSHOT] {mem_string} - 
{tag}\")", "def merge(self, session, source_state, source_dict, dest_state,\n dest_dict, load, _recursive):\n\n pass", "def _update_aggregate_dataset(self, formula, new_dframe, name, groups,\n agg_dataset):\n # parse aggregation and build column arguments\n aggregation, new_columns = self.make_columns(\n formula, name, new_dframe)\n\n agg = Aggregator(self.dataset, self.dframe,\n groups, aggregation, name)\n new_agg_dframe = agg.update(agg_dataset, self, formula, new_columns)\n\n # jsondict from new dframe\n new_data = new_agg_dframe.to_jsondict()\n\n for merged_dataset in agg_dataset.merged_datasets:\n # remove rows in child from this merged dataset\n merged_dataset.remove_parent_observations(\n agg_dataset.dataset_id)\n\n # calculate updates on the child\n merged_calculator = Calculator(merged_dataset)\n call_async(merged_calculator.calculate_updates, merged_calculator,\n new_data, parent_dataset_id=agg_dataset.dataset_id)", "def snapshot_metadata(self, snapshot_metadata):\n if snapshot_metadata is None:\n raise ValueError(\"Invalid value for `snapshot_metadata`, must not be `None`\")\n\n self._snapshot_metadata = snapshot_metadata", "def _merge(acc: Dict[str, str], cur: Any) -> Dict[str, str]:\n parsed = _parse_feature(cur)\n acc[\"timestamp\"] = parsed[\"timestamp\"]\n acc[\"lat\"] = parsed[\"lat\"]\n acc[\"lon\"] = parsed[\"lon\"]\n key = parsed[\"property\"]\n val = parsed[\"value\"]\n\n acc[key] = val\n\n return acc", "def servicemanage_snapshot_glance_metadata_get(context, snapshot_id, session=None):\n if not session:\n session = get_session()\n\n return session.query(models.ServiceManageGlanceMetadata).\\\n filter_by(snapshot_id=snapshot_id).\\\n filter_by(deleted=False).all()", "def merge_both_tables():\n old = Table.read('data/data_table_cartesian_including_tims_stars_with_bg_ols_and_component_overlaps.fits')\n wanted = Table.read('data/scocen_candidates_300k_only_spatial_cut.fits')\n additional = Table.read('data/scocen_candidates_300k_only_spatial_cut_200k_to_determine_bg_ols.fits')\n\n d_old = dict(zip(old['source_id'], old['background_log_overlap']))\n d_add = dict(zip(additional['source_id'], additional['background_log_overlap']))\n d_old.update(d_add)\n dct = d_old\n\n ln_bg_ols = [dct[source_id] for source_id in wanted['source_id']]\n print\n len(ln_bg_ols), len(wanted)\n\n wanted['background_log_overlap'] = ln_bg_ols\n print\n wanted\n\n wanted.write('data/scocen_candidates_300k_only_spatial_cut.fits', overwrite=True, format='fits')", "def restore_from_snapshot(self, volume_id, snapshot_id):\r\n self.iscsi_svc.restoreFromSnapshot(snapshot_id, id=volume_id)", "def assemble_stats(lma_sum, mma_sum, hma_sum, peer_lma_sum, peer_mma_sum, peer_hma_sum):\n lma_pct = 0.0\n mma_pct = 0.0\n hma_pct = 0.0\n\n peer_lma_pct = 0.0\n peer_mma_pct = 0.0\n peer_hma_pct = 0.0\n\n stats = {}\n\n target_lar_total = lma_sum + mma_sum + hma_sum\n if target_lar_total:\n lma_pct = round(1.0 * lma_sum / target_lar_total, 3)\n mma_pct = round(1.0 * mma_sum / target_lar_total, 3)\n hma_pct = round(1.0 * hma_sum / target_lar_total, 3)\n maj_pct = round(mma_pct + hma_pct, 3)\n stats.update({\n 'lma': lma_sum, \n 'lma_pct': lma_pct, \n 'mma': mma_sum,\n 'mma_pct': mma_pct,\n 'hma': hma_sum,\n 'hma_pct': hma_pct,\n 'maj_pct': maj_pct,\n 'lar_total': target_lar_total\n })\n else:\n stats.update({\n 'lar_total': 0,\n 'lma': 0, \n 'lma_pct': 0, \n 'mma': 0,\n 'mma_pct': 0,\n 'hma': 0,\n 'hma_pct': 0\n })\n #assemble peer data\n peer_lar_total = peer_lma_sum + peer_mma_sum + peer_hma_sum\n if 
peer_lar_total:\n peer_lma_pct = round(1.0 * peer_lma_sum / peer_lar_total, 3)\n peer_mma_pct = round(1.0 * peer_mma_sum / peer_lar_total, 3)\n peer_hma_pct = round(1.0 * peer_hma_sum / peer_lar_total, 3)\n peer_maj_pct = round(peer_mma_pct + peer_hma_pct, 3)\n stats.update({\n 'peer_lma': peer_lma_sum, \n 'peer_lma_pct': peer_lma_pct, \n 'peer_mma': peer_mma_sum,\n 'peer_mma_pct': peer_mma_pct,\n 'peer_hma': peer_hma_sum,\n 'peer_hma_pct': peer_hma_pct,\n 'peer_maj_pct': peer_maj_pct,\n 'peer_lar_total': peer_lar_total\n })\n else:\n stats.update({\n 'peer_lma': 0,\n 'peer_lma_pct': 0, \n 'peer_mma': 0, \n 'peer_mma_pct': 0,\n 'peer_hma': 0,\n 'peer_hma_pct': 0,\n 'peer_lar_total': 0\n })\n odds_lma = odds_ratio(lma_pct, peer_lma_pct)\n odds_mma = odds_ratio(mma_pct, peer_mma_pct)\n odds_hma = odds_ratio(hma_pct, peer_hma_pct)\n odds_maj = odds_ratio(mma_pct+hma_pct, peer_mma_pct+peer_hma_pct)\n stats.update({\n 'odds_lma':odds_lma,\n 'odds_mma':odds_mma,\n 'odds_hma':odds_hma,\n 'odds_maj':odds_maj\n })\n return stats", "def reset_metric_stats(self):\n\n self.__stats_table = {}", "def _merge(self):\n raise NotImplementedError", "def merge_summaries_old(root_dir,output_file=None):\n #\n sumfiles = glob.glob(f\"{root_dir}/**/*smry.txt\",recursive=True)\n nsums = len(sumfiles)\n print (f\"Found {nsums} summary files in {root_dir}\")\n #\n with tempfile.NamedTemporaryFile(mode='w') as fp:\n for i in range(nsums):\n sumfile = sumfiles[i]\n iobs = os.path.basename(sumfile)[0:10]\n with open(sumfile,'r') as sfile:\n fp.write(sfile.read())\n #\n # now read as pandas dataframe\n #\n colnames = [\"rev\",\"obsid\",\"expid\",\"mode\",\"filt\",\"tstart\",\"tend\",\"texpo\",\\\n \"mvcratio\", # (a rough measure of the ratio of counts in the MnKa versus continuum)\n \"qboxt0\",\"qboxt1\",\"qboxt2\",\"qboxt3\", # x 4 (electronics quadrant box temperatures)\n \"ndisclin_mean0\",\"ndisclin_mean1\",\"ndisclin_mean2\",\"ndisclin_mean3\", #x 4\n \"mipsel0\",\"mipsel1\",\"mipsel2\",\"mipsel3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"maxmip0\",\"maxmip1\",\"maxmip2\",\"maxmip3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"ndisclin_med0\",\"ndisclin_med1\",\"ndisclin_med2\",\"ndisclin_med3\", #median x 4\n \"ndisclin_std0\",\"ndisclin_std1\",\"ndisclin_std2\",\"ndisclin_std3\"] #, stddev x 4\n #\n df = pd.read_csv(fp.name,delimiter='\\s+',header=None,skip_blank_lines=True,names=colnames)\n #\n # now calculate the time_delta, the difference in years from observation start and 2000-01-01\n #\n stime = [(datetime.strptime(x,\"%Y-%m-%dT%H:%M:%S\")-time0).total_seconds()/(365.0*24.0*3600.0) for x in df.tstart]\n df.insert(6,\"delta_time\",pd.Series(stime,index=df.index))\n #\n print (f'Last observation t={df.delta_time.max():.2f} years')\n if (output_file is not None):\n df.to_csv(output_file)\n fp.close()\n return df", "def take_snapshot(self):\r\n self.snapshot = self.max_gain, self.__dup_array(), copy.copy(self.free_cell_list)" ]
[ "0.72327065", "0.5681518", "0.5414538", "0.509253", "0.5089406", "0.50668776", "0.50477487", "0.50349045", "0.5023128", "0.50017947", "0.49228266", "0.48627433", "0.47599393", "0.4753302", "0.47469756", "0.47447816", "0.47401235", "0.47382542", "0.47220156", "0.4715443", "0.47067896", "0.46773222", "0.46512926", "0.46499857", "0.46487054", "0.4639972", "0.46301946", "0.46147826", "0.4611982", "0.45998287", "0.4594962", "0.45949453", "0.45873183", "0.4582951", "0.45782727", "0.4566743", "0.45656076", "0.45653233", "0.45359218", "0.45338425", "0.45331046", "0.45315468", "0.45269734", "0.45213708", "0.4514455", "0.4512558", "0.44928068", "0.4484109", "0.44809842", "0.44753063", "0.44742796", "0.446608", "0.44623882", "0.44472632", "0.4441163", "0.44322616", "0.4432153", "0.44205412", "0.441201", "0.44098324", "0.4398728", "0.43936926", "0.43879446", "0.43755615", "0.43737054", "0.4368965", "0.43656215", "0.4365403", "0.43614382", "0.43600228", "0.43571883", "0.43446073", "0.43430367", "0.43380144", "0.43220297", "0.4321416", "0.43122587", "0.42950973", "0.42931446", "0.429138", "0.42909068", "0.4288576", "0.42862424", "0.428322", "0.42792147", "0.4274481", "0.42743865", "0.42719686", "0.42716384", "0.42693833", "0.42676225", "0.426485", "0.42610416", "0.42568946", "0.4256563", "0.42537946", "0.425317", "0.42477438", "0.42474803", "0.42392552" ]
0.7712152
0
Merges in a set of custom metrics. The metrics should be provided as an iterable where each item is a tuple of the metric name and the accumulated stats for the metric.
def merge_custom_metrics(self, metrics):
    if not self.__settings:
        return

    for name, other in metrics:
        key = (name, '')
        stats = self.__stats_table.get(key)
        if not stats:
            self.__stats_table[key] = other
        else:
            stats.merge_stats(other)
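A minimal, self-contained sketch of the contract this method expects: an iterable of (name, stats) tuples whose stats objects expose merge_stats(). SimpleStats below is a hypothetical stand-in for the real accumulated-stats types, used only to illustrate the keying on (name, ''); it is not the library's API.

class SimpleStats:
    """Hypothetical stand-in for the real accumulated-stats objects."""

    def __init__(self, call_count, total):
        self.call_count = call_count
        self.total = total

    def merge_stats(self, other):
        # Fold the other accumulator's counters into this one.
        self.call_count += other.call_count
        self.total += other.total


stats_table = {}

def merge_custom_metrics(metrics):
    # Mirrors the method above with a plain dict: key on (name, '') and
    # either adopt the incoming stats or merge into the existing entry.
    for name, other in metrics:
        key = (name, '')
        stats = stats_table.get(key)
        if not stats:
            stats_table[key] = other
        else:
            stats.merge_stats(other)

merge_custom_metrics([('Custom/Cache/Hits', SimpleStats(3, 0.021))])
merge_custom_metrics([('Custom/Cache/Hits', SimpleStats(1, 0.008))])
assert stats_table[('Custom/Cache/Hits', '')].call_count == 4

Note that the first stats object seen for a name is stored by reference, so callers should hand over accumulators they no longer mutate.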
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_metrics(self, metrics):\n for i, metric in enumerate(self.config.metrics):\n tf.summary.scalar(metric, metrics[i])", "def aggregate(all_metrics, reducer, suffix):\n # Collect metric separately\n separated_metrics = {} # type: dict[frozenset, list[dict]]\n for el in all_metrics:\n key = frozenset(el[\"metric\"][\"dimensions\"].items())\n if key not in separated_metrics:\n separated_metrics[key] = [el]\n else:\n separated_metrics[key].append(el)\n\n # Collect all dimensions\n dims = {}\n for metric_dims in separated_metrics.keys():\n for prop, val in dict(metric_dims).iteritems():\n if prop in dims:\n dims[prop].add(val)\n else:\n dims[prop] = set(val)\n\n # Sort each metric\n for _, metric in separated_metrics.iteritems():\n metric.sort(key=lambda v: v[\"metric\"][\"timestamp\"])\n\n separated_metrics = sorted(separated_metrics.values(), key=len)\n separated_metrics.reverse()\n\n # Compute the new values\n new_values = []\n all_timestamps = map(\n lambda l: map(\n lambda x: x[\"metric\"][\"timestamp\"], l),\n separated_metrics)\n metric_count = len(separated_metrics)\n for index in range(0, len(separated_metrics[0])):\n new_value = reducer[0](\n separated_metrics[0][index][\"metric\"][\"value\"],\n metric_count)\n new_timestamp = separated_metrics[0][index][\"metric\"][\"timestamp\"]\n for metric_index in range(1, metric_count):\n new_value = reducer[1](new_value, helpers.interpolate(\n new_timestamp,\n separated_metrics[metric_index],\n all_timestamps[metric_index]\n ), metric_count)\n new_values.append((new_timestamp, new_value))\n\n # Aggregate the other details:\n metric_name = separated_metrics[0][0][\"metric\"][\"name\"] + suffix\n meta = separated_metrics[0][0][\"meta\"]\n new_metrics = [\n helpers.create_agg_metric(\n metric_name,\n meta,\n dims,\n val[0],\n val[1]\n ) for val in new_values\n ]\n return new_metrics", "def record_custom_metrics(self, metrics):\n\n if not self.__settings:\n return\n\n for name, value in metrics:\n self.record_custom_metric(name, value)", "def _build_metric_list_to_collect(self, additional_metrics):\n metrics_to_collect = {}\n\n # Defaut metrics\n for default_metrics in self.DEFAULT_METRICS.itervalues():\n metrics_to_collect.update(default_metrics)\n\n # Additional metrics metrics\n for option in additional_metrics:\n additional_metrics = self.AVAILABLE_METRICS.get(option)\n if not additional_metrics:\n if option in self.DEFAULT_METRICS:\n self.log.warning(\n u\"`%s` option is deprecated.\"\n u\" The corresponding metrics are collected by default.\", option\n )\n else:\n self.log.warning(\n u\"Failed to extend the list of metrics to collect:\"\n u\" unrecognized `%s` option\", option\n )\n continue\n\n self.log.debug(\n u\"Adding `%s` corresponding metrics to the list\"\n u\" of metrics to collect.\", option\n )\n metrics_to_collect.update(additional_metrics)\n\n return metrics_to_collect", "def _aggregate_metrics(metrics, aggfunc, base):\n return base.Struct(**_UNCOMPRESSED_METRICS)(\n left_side_bearing=aggfunc(_m.left_side_bearing for _m in metrics),\n right_side_bearing=aggfunc(_m.right_side_bearing for _m in metrics),\n character_width=aggfunc(_m.character_width for _m in metrics),\n character_ascent=aggfunc(_m.character_ascent for _m in metrics),\n character_descent=aggfunc(_m.character_descent for _m in metrics),\n character_attributes=0,\n )", "def add_stats(self):\n units = self.get_unit_map()\n for metric in self.raw_metrics:\n unit, metric_type = units.get(metric, (DEFAULT_UNIT, DEFAULT_TYPE))\n if metric_type == \"counter\":\n # 
Unit/Second\n unit = \"/\".join((unit, \"Second\"))\n self.add_derive_value(metric, unit, self.raw_metrics[metric], rate=True)\n else:\n self.add_gauge_value(metric, unit, self.raw_metrics[metric])", "def aggregate_metrics(metrics):\n if len(metrics) == 1:\n return metrics[0]\n else:\n agg_metrics = metrics[0]\n for metric in agg_metrics.keys():\n vals = [x[metric] for x in metrics]\n agg_metrics[metric] = [np.mean(vals), np.std(vals)]\n return agg_metrics", "def merge_measurements(measurements_list: List[Measurements]) -> \\\n Tuple[Measurements, List[MetricName]]:\n summed_metrics: Measurements = {}\n\n all_metrics_names = set() # Sum of set of names.\n for measurements in measurements_list:\n all_metrics_names.update(measurements.keys())\n\n for metric_name in all_metrics_names:\n if metric_name in METRICS_METADATA:\n\n if METRICS_METADATA[metric_name].type == MetricType.GAUGE:\n operation = lambda values: sum(values) / len(values) # noqa\n else:\n assert METRICS_METADATA[metric_name].type == MetricType.COUNTER\n operation = sum\n\n else:\n log.debug('By default, unknown metric %r uses \"sum\" as merge operation.', metric_name)\n operation = sum\n\n summed_metrics[metric_name] = operation(\n [measurements[metric_name] for measurements in measurements_list\n if metric_name in measurements])\n\n return summed_metrics", "def optimize_metrics(self,\n metrics: list = None,\n verbose: bool = True):\n\n if metrics is None:\n metrics = self._supported_metrics\n else:\n metrics = [metric.lower() for metric in metrics]\n assert all(metric in self._supported_metrics for metric in metrics)\n for i in metrics:\n super(ThresholdOptimizer, self).__getattribute__(f'get_best_{i}_metrics')(verbose=verbose)", "def update(self, current_iter, *metrics, **named_metrics):\n\n # Same order as __init__() in python>=3.6\n if len(metrics) > 0:\n for key, metric in zip(self.metrics.keys(), metrics):\n self.metrics[key].append((current_iter, metric))\n \n # Random order with names\n elif len(named_metrics) > 0:\n for name, metric in named_metrics.item():\n self.metrics[name].append((metric))\n\n else:\n raise ValueError(\"No valid value to update losses\")", "def add_metrics(self,\n metrics_: Optional[Dict[str, Any]] = None,\n add_to_child_: bool = True,\n **kwargs: Any) -> None:\n if self._child_stack and add_to_child_:\n self._child_stack[-1].add_metrics(metrics_, **kwargs)\n else:\n def collect(target: Dict[str, Any]):\n if metrics_:\n for key, val in metrics_.items():\n key = stage_type.add_metric_prefix(key)\n target[key] = to_number_or_numpy(val)\n if kwargs:\n for key, val in kwargs.items():\n key = stage_type.add_metric_prefix(key)\n target[key] = to_number_or_numpy(val)\n\n stage_type = self._stage.type\n if self._stage.batch.is_active:\n collect(self._batch_metrics)\n elif self._stage.epoch is not None and self._stage.epoch.is_active:\n collect(self._epoch_metrics)\n else:\n collect(self._stage_metrics)\n self._stage.push_metrics(self._stage_metrics)", "def _add_metrics_to_metrics_provider(cls, mp, metrics):\n providers_info = cls._METRICS_PROVIDER_INFO[mp.type][mp.namespace][\"providers\"]\n provided_metrics = next(\n provider_info[\"provided_metrics\"]\n for provider_info in providers_info\n if provider_info[\"name\"] == mp.name\n )\n\n # Check if the provided metrics are equal to the metrics\n num_metrics = len(metrics)\n if len(provided_metrics) != num_metrics:\n raise ValueError(\n f\"Found {len(provided_metrics)} metrics for metrics provider \"\n f\"{mp.name}. 
Expected {num_metrics}.\"\n )\n\n # Check what type of provider is used at the moment\n if mp.type == MetricsProviderType.STATIC:\n valued_metric_class = StaticMetric\n elif mp.type == MetricsProviderType.PROMETHEUS:\n valued_metric_class = PrometheusMetric\n else:\n raise NotImplementedError()\n # Iterate through the provided metrics\n valued_metrics = []\n for i, (metric_name, metric_value) in enumerate(provided_metrics):\n metric = metrics[i]\n if metric.mp_metric_name != metric_name:\n msg = (\n f\"Unexpected name {metric.mp_metric_name}. Expected: {metric_name}.\"\n )\n raise ValueError(msg)\n valued_metric = valued_metric_class(metric, metric_value)\n valued_metrics.append(valued_metric)\n mp.set_valued_metrics(valued_metrics)", "def calc_metric(output, metrics):\n score = []\n for metric in metrics:\n metric_mod = __import__(\"sklearn.metrics\", fromlist=[metric])\n metric_func = getattr(metric_mod, metric)\n score.append(metric_func(output[0], output[1]))\n return score, output", "def register_additional_metric_ops(\n self, metric_ops: Dict[str, Tuple[tf.Tensor, tf.Tensor]]) -> None:\n for metric_name, (value_op, update_op) in metric_ops.items():\n if metric_name in self._metric_names:\n raise ValueError('tried to register new metric with name %s, but a '\n 'metric with that name already exists.' % metric_name)\n self._metric_names.append(metric_name)\n self._metric_value_ops.append(value_op)\n self._metric_update_ops.append(update_op)\n\n # Update metric variables incrementally with only the new elements in the\n # metric_variables collection.\n collection = self._graph.get_collection(\n tf.compat.v1.GraphKeys.METRIC_VARIABLES)\n collection = collection[len(self._metric_variable_nodes):]\n\n # Note that this is a node_list - it's not something that TFMA\n # configures, but something that TF.Learn configures.\n #\n # As such, we also use graph.get_tensor_by_name directly, instead of\n # TFMA's version which expects names encoded by TFMA.\n for node in collection:\n self._metric_variable_nodes.append(node)\n with self._graph.as_default():\n placeholder = tf.compat.v1.placeholder(\n dtype=node.dtype, shape=node.get_shape())\n self._metric_variable_placeholders.append(placeholder)\n self._metric_variable_assign_ops.append(\n tf.compat.v1.assign(node, placeholder))\n\n with self._graph.as_default():\n self._all_metric_variable_assign_ops = tf.group(\n *self._metric_variable_assign_ops)\n self._all_metric_update_ops = tf.group(*self._metric_update_ops)\n self._reset_variables_op = tf.compat.v1.local_variables_initializer()\n self._session.run(self._reset_variables_op)\n\n self._perform_metrics_update_fn = self._session.make_callable(\n fetches=self._all_metric_update_ops,\n feed_list=self._perform_metrics_update_fn_feed_list)", "def generateDerivedMetrics(kernelMetrics, statistics, throughputMetrics = {}, countMetrics = {}, combinedMetrics = {}):\n\n # combine single metrics \n for combinedMetric in combinedMetrics:\n for kernel in kernelMetrics:\n logging.debug(\"Combining metrics for kernel {}\".format(kernel))\n # iterate over each run, take the number of runs to be\n # the length of the first source metric\n if combinedMetrics[combinedMetric][0] in kernelMetrics[kernel]:\n combinedMetricCounts = []\n sourceMetricMissing = False\n # go through each run\n for run in range(0, len(kernelMetrics[kernel][ combinedMetrics[combinedMetric][0] ])):\n\n combinedMetricRunCount = 0\n # take all the source metrics and add them into the\n # combined metric\n for sourceMetric in 
combinedMetrics[combinedMetric]:\n if sourceMetric in kernelMetrics[kernel]:\n # TODO delete once debugged print(\"runs of {} {}\".format(sourceMetric, kernelMetrics[kernel][sourceMetric]))\n combinedMetricRunCount = combinedMetricRunCount + kernelMetrics[kernel][sourceMetric][run]\n else:\n sourceMetricMissing = True\n logging.info(\"Source metric {} missing for combined metric {}, combined metric will not be\"\n \"added\".format(sourceMetric, combinedMetric))\n # append this run ot the end of the list\n combinedMetricCounts.append(combinedMetricRunCount)\n if not sourceMetricMissing:\n kernelMetrics[kernel][combinedMetric] = combinedMetricCounts\n\n # take throughputs and convert them to counts\n # doesn't use averages since that can skew results\n for throughputMetricName, countMetricName in zip(throughputMetrics, countMetrics):\n for kernel in kernelMetrics:\n logging.debug(\"Generating count metrics for {} in kernel {}\".format(throughputMetricName, kernel))\n if throughputMetricName in kernelMetrics[kernel]:\n counts = []\n for run in range(0, len(kernelMetrics[kernel][throughputMetricName])):\n count = kernelMetrics[kernel][throughputMetricName][run] * kernelMetrics[kernel][\"Duration\"][run]\n counts.append(count)\n kernelMetrics[kernel][countMetricName] = counts", "def summarize_metrics(metrics):\n summarized = {}\n for k in metrics:\n if k.endswith('mse'):\n summarized[k[:-3] + 'rmse'] = np.sqrt(np.mean(metrics[k]))\n elif k.startswith('err'):\n summarized[k + '_mean'] = np.mean(metrics[k])\n summarized[k + '_rmse'] = np.sqrt(np.mean(metrics[k]**2))\n elif k.endswith('nomean'):\n summarized[k] = metrics[k]\n else:\n summarized[k] = np.mean(metrics[k])\n\n return summarized", "def get_metrics(self, add_metrics={}):\n tot_px_cnt = self.res * int(self.tensors['samples_evaluated'][0])\n\n if self.debug:\n sum_per_class = self.tensors['TP'] + self.tensors['TN'] + self.tensors['FP'] + self.tensors['FN']\n unique = sum_per_class.unique()\n assert len(unique) == 1, 'Expect to observe the exact same number for all classes.'\n assert unique[0] == self.tensors['PX_CNT'].sum() == tot_px_cnt, 'Expect exactly one type of prediction per pixel.'\n\n mask_non_observed = (self.tensors['PX_CNT']).bool()\n mask_bg = self.tensors['M']\n mask_combined = (self.tensors['M'] * mask_non_observed).bool() # in PyTorch 1.4 no logical AND\n\n if self.debug:\n assert mask_combined.sum() <= mask_bg.sum()\n assert mask_combined.sum() <= mask_non_observed.sum()\n \n accuracies = (self.tensors['TP'] + self.tensors['TN']) / tot_px_cnt\n acc = torch.mean(accuracies[mask_combined])\n acc_bg_included = torch.mean(accuracies[mask_non_observed])\n\n IoUs = self.tensors['TP'] / (tot_px_cnt - self.tensors['TN']) # per class: I/U, U = sum(TP,FP,FN) = all - TN\n mIoU = torch.mean(IoUs[mask_combined])\n mIoU_bg_included = torch.mean(IoUs[mask_non_observed])\n\n if self.debug:\n if torch.cuda.is_available():\n for i in [accuracies, acc, acc_bg_included, IoUs, mIoU, mIoU_bg_included]:\n assert i.is_cuda\n\n results = OrderedDict()\n\n for i in ['acc','mIoU']:\n for j in ['','_bg_included']:\n results[ i + j + '_' + self.fold ] = float(eval(i+j+'.cpu()'))\n\n for i in range(self.tensors['TP'].shape[0]):\n results['IoU_class_' + str(i) + '_' + self.fold] = float(IoUs[i].cpu())\n results['acc_class_' + str(i) + '_' + self.fold] = float(accuracies[i].cpu())\n\n if self.debug:\n for k in results:\n if isinstance(results[k], float) and not math.isnan(results[k]):\n # don't apply check to nans and str; we don't use exactly 1 due to 
smaller rounding error\n assert results[k] <= 1.0001, f'Failure for {k,results[k],type(results[k])}: any metric derived from the confusion matrix should be <= 1.'\n\n #for t in self.tensors:\n # results[t + '_' + self.fold] = self.tensors[t].cpu()\n\n if add_metrics:\n for k in add_metrics:\n results[k + '_' + self.fold] = float(add_metrics[k])\n\n return results", "def metrics_group():", "def merge_accumulators(self, accumulators):\n raise NotImplementedError", "def compute_metrics(self, x, extra=None):\n if self.__metrics is None and extra is None:\n return None\n\n ret = {}\n if self.__metrics is not None:\n for m in self.__metrics:\n ret[m.name] = self._mdmetric(x, m)\n\n if extra is not None and extra.name not in ret:\n ret[extra.name] = self._mdmetric(x, extra)\n\n return ret", "def _update_metric(\n metrics: List[mlflow.entities.Metric], dataset: MetricsDict = {}\n ) -> MetricsDict:\n for metric in metrics:\n metric_dict = {\"step\": metric.step, \"value\": metric.value}\n if metric.key in dataset:\n if isinstance(dataset[metric.key], list):\n dataset[metric.key].append(metric_dict)\n else:\n dataset[metric.key] = [dataset[metric.key], metric_dict]\n else:\n dataset[metric.key] = metric_dict\n return dataset", "def calculate_metrics(jobs, metrics_names):\n metrics_def_dict = {mn: {'metric': mn.split('_')[0], 'agg': mn.split('_')[1], 'data': [], 'value': -1} for mn in metrics_names}\n\n for job in jobs:\n if job['category'] == 'run' and job['jobstatus'] == 'finished':\n for mn, mdata in metrics_def_dict.items():\n if 'per' in mdata['metric']:\n if mdata['metric'].split('per')[0] in job and mdata['metric'].split('per')[1] in job and job[mdata['metric'].split('per')[1]] > 0:\n mdata['data'].append(job[mdata['metric'].split('per')[0]]/(1.0*job[mdata['metric'].split('per')[1]]))\n elif mdata['metric'] in job and job[mdata['metric']]:\n mdata['data'].append(job[mdata['metric']])\n\n for mn, mdata in metrics_def_dict.items():\n if 'avg' in mdata['agg']:\n mdata['value'] = sum(mdata['data'])/(1.0*len(mdata['data'])) if len(mdata['data']) > 0 else -1\n if 'sum' in mdata['agg']:\n mdata['value'] = sum(mdata['data'])\n\n metrics = {}\n for mn, mdata in metrics_def_dict.items():\n if mdata['value'] > 0:\n if 'percent' in mdata['agg']:\n metrics[mn] = round(mdata['value'] * 100.0, 2)\n else:\n metrics[mn] = round(mdata['value'], 2)\n\n return metrics", "def collect_stats(self, cursor):\n metrics = self.config.get('metrics', DEFAULT_METRICS)\n if isinstance(metrics, str):\n if metrics == \"all\":\n # puffer_pool_status is only for 5.5, so we ignore that by default\n metrics = CATEGORIES.keys()\n metrics.remove('buffer_pool_stats')\n else:\n # support comma-separated list\n metrics = re.split(\"\\s*,\\s*\", metrics)\n\n self.logger.debug(\"metrics to collect: %s\" % \", \".join(metrics))\n for cat in metrics:\n if cat in CATEGORIES:\n self.add_category_stats(cat, cursor)\n else:\n self.logger.warning(\"%s is not a valid metric category\" % cat)\n\n if 'newrelic' in metrics:\n self.derive_newrelic_stats()", "def compute_additional_metrics(metric_summary: pd.DataFrame, metrics: str_t, pos_label: str):\n metric_slice = metric_summary[[\"true_labels_folds\", \"predicted_labels_folds\"]].copy()\n metric_out = {}\n\n # ensure list\n if isinstance(metrics, str):\n metrics = [metrics]\n\n for metric in metrics:\n score_funcs = dict(getmembers(sklearn.metrics))\n if metric in score_funcs:\n score_func = score_funcs[f\"{metric}\"]\n elif f\"{metric}_score\" in score_funcs:\n score_func = 
score_funcs[f\"{metric}_score\"]\n metric = f\"{metric}_score\" # noqa: PLW2901\n else:\n raise ValueError(f\"Metric '{metric}' not found.\")\n metric_out[metric] = metric_slice.apply(_apply_score, args=(score_func, pos_label), axis=1)\n metric_out = pd.concat(metric_out, names=[\"score\", \"folds\"], axis=1)\n\n metric_out = metric_out.stack([\"score\", \"folds\"])\n metric_out = metric_out.groupby(metric_out.index.names[:-1]).agg(\n [(\"mean\", lambda x: np.mean), (\"std\", lambda x: np.std(x))] # noqa: ARG005\n )\n\n metric_out = metric_out.unstack(\"score\").sort_index(axis=1, level=\"score\")\n metric_out.columns = metric_out.columns.map(\"_test_\".join)\n metric_summary = metric_summary.join(metric_out)\n\n return metric_summary", "def evaluate_with_metrics(self, dataset, metrics, *args, **kwargs):\n\n utils.assert_raise(isinstance(metrics, dict), ValueError,\n '\"metrics\" must be a dict with metric_name -> metric_function')\n result = dict()\n\n for sample in dataset:\n output = self.predict(sample)\n\n for key, call in metrics.items():\n holder = result.get(key, list())\n holder.append(call(output, sample))\n\n result[key] = holder\n\n return result", "def add_metrics(self, metric_dict: dict):\n self.metric_dict.update(metric_dict)", "def send_metrics(self):\n metrics = self.get_metrics()\n if not metrics:\n return\n\n for mkey, metric in metrics.items():\n for mname, mval in metric.items():\n try:\n self.agent.record_custom_metric(self.convert_metric_name(mkey, mname), mval, None)\n except Exception as e:\n print_(e)", "def metrics(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ElastigroupMultipleMetricsMetricArgs']]]]:\n return pulumi.get(self, \"metrics\")", "def metrics(self, metrics):\n\n self._metrics = metrics", "def reduce_metrics(cls, logging_outputs: List[Dict[str, Any]]) -> None:\n loss_sum = sum(log.get('loss', 0) for log in logging_outputs)\n ntokens = sum(log.get('ntokens', 0) for log in logging_outputs)\n sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)\n nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)\n\n metrics.log_scalar('loss', loss_sum / sample_size / math.log(2), sample_size, round=3)", "def unstack_metrics(**metrics):\n if len(metrics) != 1:\n raise ValueError(\"Stacked metrics dict should have one element, got \"\n \"{}\".format(len(metrics)))\n names_stacked = list(metrics.keys())[0]\n values_stacked = metrics[names_stacked]\n names = names_stacked.split(\"!\")\n values = tf.unstack(values_stacked, axis=1)\n return {name: tf.metrics.mean(value) for name, value in\n zip(names, values)}", "def summerize_adapter_metrics(parsed_metrics: Dict[int, dict]) -> Dict[Tuple[str, str], dict]:\n\n summarized_metrics = {}\n for lane in parsed_metrics:\n # Iterate over all samples in lane\n summarized_metrics[lane] = summarized_metrics.get(lane, {})\n for value in parsed_metrics[lane].values():\n sample_id = value.get(\"Sample_ID\")\n summarized_metrics[lane][sample_id] = summarized_metrics[lane].get(sample_id, value)\n summarized_metrics[lane][sample_id][\n \"R\" + value.get(\"ReadNumber\") + \"_SampleBases\"\n ] = value.get(\"SampleBases\")\n\n return summarized_metrics", "def calculate_metrics(metrics_data: List[Tuple[Metric, DataType]]) -> List[float]:\n pass", "def merge_accumulators(self, accumulators):\n accumulator = WordOccurrenceAccumulator(self.relevant_ids, self.dictionary)\n for other_accumulator in accumulators:\n accumulator.merge(other_accumulator)\n # Workers do partial accumulation, so none of the 
co-occurrence matrices are symmetrized.\n # This is by design, to avoid unnecessary matrix additions/conversions during accumulation.\n accumulator._symmetrize()\n logger.info(\"accumulated word occurrence stats for %d virtual documents\", accumulator.num_docs)\n return accumulator", "def report_metrics(prefix, metrics):\n series = []\n\n now = time.time()\n for key, value in metrics.iteritems():\n metric = '{prefix}.{key}'.format(prefix=prefix, key=key)\n point = [(now, value)]\n series.append({'metric':metric, 'points':point})\n\n if len(series) > 0:\n print u\"Sending {}\".format(series)\n dog_http_api.metrics(series)", "def get_metrics(self, objs_metrics):\n d = {}\n _S = DiffStatus\n\n for status in _S.iter():\n d[status] = [dict(obj_m) for obj_m in objs_metrics if obj_m['status'] == status]\n\n count_a_only = len(d.get(_S.deleted, []))\n count_b_only = len(d.get(_S.added, []))\n count_modified = len(d.get(_S.modified, []))\n count_unchanged = len(d.get(_S.unchanged, []))\n count_common = count_modified + count_unchanged\n\n count_a = count_common + count_a_only\n count_b = count_common + count_b_only\n\n d['count'] = {\n 'a': count_a,\n 'b': count_b,\n 'a_only': count_a_only,\n 'b_only': count_b_only,\n 'modified': count_modified,\n 'unchanged': count_unchanged,\n }\n\n d['summary'] = {\n _S.added: {\n 'count': count_b_only,\n 'relative_to': {\n 'a': count_b_only / count_a,\n 'b': count_b_only / count_b,\n }\n },\n _S.deleted: {\n 'count': count_a_only,\n 'relative_to': {\n 'a': count_a_only / count_a,\n 'b': count_a_only / count_b,\n },\n },\n _S.modified: {\n 'count': count_modified,\n 'relative_to': {\n 'a': count_modified / count_a,\n 'b': count_modified / count_b,\n }\n },\n _S.unchanged: {\n 'count': count_unchanged,\n 'relative_to': {\n 'a': count_unchanged / count_a,\n 'b': count_unchanged / count_b,\n }\n },\n }\n\n return d", "def set_metrics(self, metrics: List[Callable]) -> None:\n self.metrics = metrics", "def _evaluate(dataset: dict, name: str, metrics=None):\n if metrics is None:\n metrics = ['Accuracy', 'AUROC', 'AUPRC', 'Precision', 'Recall', 'F1', 'F2']\n measures = [dataset[metric] for metric in metrics]\n measures.insert(0, name)\n return measures", "def collect(self): # pylint: disable=no-self-use\n start = time.time()\n\n if \"jobs\" in PLUGIN_SETTINGS and PLUGIN_SETTINGS[\"jobs\"]:\n for metric in metric_jobs():\n yield metric\n\n if \"models\" in PLUGIN_SETTINGS:\n for metric in metric_models(PLUGIN_SETTINGS[\"models\"]):\n yield metric\n\n # --------------------------------------------------------------\n # Extras Function defined in configuration.py or the Regristry\n # # --------------------------------------------------------------\n if \"extras\" in PLUGIN_SETTINGS:\n for metric in collect_extras_metric(PLUGIN_SETTINGS[\"extras\"]):\n yield metric\n\n for metric in collect_extras_metric(__REGISTRY__):\n yield metric\n\n gauge = GaugeMetricFamily(\n \"nautobot_app_metrics_processing_ms\", \"Time in ms to generate the app metrics endpoint\"\n )\n duration = time.time() - start\n gauge.add_metric([], format(duration * 1000, \".5f\"))\n yield gauge", "def format_metrics(metrics, split):\n result = format_partial_metrics(metrics, split)\n result += '\\n'\n result += format_partial_metrics(metrics, split, extra='_r')\n return result", "def add_metrics(_dict):\n for key, itr in _dict.items():\n if key not in self.metric_cols:\n self.metric_cols.append(key)", "def metrics(self):\n raise NotImplementedError(\"metrics\")", "def _create_metric_sum(a, b):\n 
metric_sum = GridSearchClassificationMetrics()\n metric_sum.accuracy = a.accuracy + b.accuracy\n metric_sum.precision = a.precision + b.precision\n metric_sum.f_measure = a.f_measure + b.f_measure\n metric_sum.recall = a.recall + b.recall\n metric_sum.confusion_matrix = a.confusion_matrix + b.confusion_matrix\n return metric_sum", "def set_metrics(metric_dict, cd_loss, cd_corrects, cd_report):\n metric_dict['cd_losses'].append(cd_loss.item())\n metric_dict['cd_corrects'].append(cd_corrects.item())\n metric_dict['cd_precisions'].append(cd_report[0])\n metric_dict['cd_recalls'].append(cd_report[1])\n metric_dict['cd_f1scores'].append(cd_report[2])\n\n return metric_dict", "def apply_metrics(x):\n d = {}\n d[\"custom_metric\"] = custom_metric(\n x[\"actuals\"], x[\"forecast\"], x[\"avg_vol\"].values[0]\n )\n d[\"uncertainty_metric\"] = uncertainty_metric(\n x[\"actuals\"], x[\"upper_bound\"], x[\"lower_bound\"], x[\"avg_vol\"].values[0]\n )\n\n return pd.Series(d, index=[\"custom_metric\", \"uncertainty_metric\"])", "def _default_metrics(self):\n\n def _streaming_auc_with_class_id_label(predictions, labels, weights=None):\n indicator_labels = _class_id_labels_to_indicator(\n labels, num_classes=self.logits_dimension)\n return _streaming_auc(predictions, indicator_labels, weights)\n\n loss_key = _summary_key(self.head_name, metric_key.MetricKey.LOSS)\n accuracy_key = _summary_key(self.head_name, metric_key.MetricKey.ACCURACY)\n auc_key = _summary_key(self.head_name, metric_key.MetricKey.AUC)\n metrics = {\n loss_key:\n _weighted_average_loss_metric_spec(\n self._loss_fn, prediction_key.PredictionKey.LOGITS,\n self._label_name, self._weight_column_name),\n # TODO(b/29366811): This currently results in both an \"accuracy\" and an\n # \"accuracy/threshold_0.500000_mean\" metric for binary classification.\n accuracy_key:\n self._metric_spec(metrics_lib.streaming_accuracy,\n prediction_key.PredictionKey.CLASSES),\n auc_key:\n self._metric_spec(_streaming_auc_with_class_id_label,\n prediction_key.PredictionKey.PROBABILITIES)\n }\n\n def _class_predictions_streaming_mean(predictions,\n labels,\n weights=None,\n class_id=None):\n del labels\n return metrics_lib.streaming_mean(\n array_ops.where(\n math_ops.equal(\n math_ops.to_int32(class_id), math_ops.to_int32(predictions)),\n array_ops.ones_like(predictions),\n array_ops.zeros_like(predictions)),\n weights=weights)\n\n def _class_labels_streaming_mean(predictions,\n labels,\n weights=None,\n class_id=None):\n del predictions\n assert class_id is not None\n return metrics_lib.streaming_mean(\n array_ops.where(\n math_ops.equal(\n math_ops.to_int32(class_id), math_ops.to_int32(labels)),\n array_ops.ones_like(labels), array_ops.zeros_like(labels)),\n weights=weights)\n\n def _class_streaming_auc(predictions, labels, weights=None, class_id=None):\n assert class_id is not None\n indicator_labels = _class_id_labels_to_indicator(\n labels, num_classes=self.logits_dimension)\n return _streaming_auc(\n predictions, indicator_labels, weights=weights, class_id=class_id)\n\n for class_id in self._metric_class_ids:\n\n # TODO(ptucker): Add per-class accuracy, precision, recall.\n\n prediction_mean_key = _summary_key(\n self.head_name, metric_key.MetricKey.CLASS_PREDICTION_MEAN % class_id)\n label_mean_key = _summary_key(self.head_name,\n metric_key.MetricKey.CLASS_LABEL_MEAN %\n class_id)\n probability_mean_key = _summary_key(\n self.head_name,\n metric_key.MetricKey.CLASS_PROBABILITY_MEAN % class_id)\n logits_mean_key = _summary_key(self.head_name,\n 
metric_key.MetricKey.CLASS_LOGITS_MEAN %\n class_id)\n auc_key = _summary_key(self.head_name,\n metric_key.MetricKey.CLASS_AUC % class_id)\n\n metrics[prediction_mean_key] = self._metric_spec(\n functools.partial(\n _class_predictions_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.CLASSES)\n metrics[label_mean_key] = self._metric_spec(\n functools.partial(\n _class_labels_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.PROBABILITIES)\n metrics[probability_mean_key] = self._metric_spec(\n functools.partial(\n _predictions_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.PROBABILITIES)\n metrics[logits_mean_key] = self._metric_spec(\n functools.partial(\n _predictions_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.LOGITS)\n metrics[auc_key] = self._metric_spec(\n functools.partial(\n _class_streaming_auc, class_id=class_id),\n prediction_key.PredictionKey.LOGITS)\n\n return metrics", "def _default_metrics(self):\n loss_key = _summary_key(self.head_name, metric_key.MetricKey.LOSS)\n accuracy_key = _summary_key(self.head_name, metric_key.MetricKey.ACCURACY)\n auc_key = _summary_key(self.head_name, metric_key.MetricKey.AUC)\n\n metrics = {\n loss_key:\n _weighted_average_loss_metric_spec(\n self._loss_fn, prediction_key.PredictionKey.LOGITS,\n self._label_name, self._weight_column_name),\n # TODO(b/29366811): This currently results in both an \"accuracy\" and an\n # \"accuracy/threshold_0.500000_mean\" metric for binary classification.\n accuracy_key:\n self._metric_spec(metrics_lib.streaming_accuracy,\n prediction_key.PredictionKey.CLASSES),\n auc_key:\n self._metric_spec(_streaming_auc,\n prediction_key.PredictionKey.PROBABILITIES),\n }\n\n for class_id in self._metric_class_ids:\n\n # TODO(ptucker): Add per-class accuracy, precision, recall.\n\n prediction_mean_key = _summary_key(\n self.head_name, metric_key.MetricKey.CLASS_PREDICTION_MEAN % class_id)\n label_mean_key = _summary_key(self.head_name,\n metric_key.MetricKey.CLASS_LABEL_MEAN %\n class_id)\n probability_mean_key = _summary_key(\n self.head_name,\n metric_key.MetricKey.CLASS_PROBABILITY_MEAN % class_id)\n logits_mean_key = _summary_key(self.head_name,\n metric_key.MetricKey.CLASS_LOGITS_MEAN %\n class_id)\n auc_key = _summary_key(self.head_name,\n metric_key.MetricKey.CLASS_AUC % class_id)\n\n metrics[prediction_mean_key] = self._metric_spec(\n functools.partial(\n _predictions_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.CLASSES)\n metrics[label_mean_key] = self._metric_spec(\n functools.partial(\n _indicator_labels_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.CLASSES)\n metrics[probability_mean_key] = self._metric_spec(\n functools.partial(\n _predictions_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.PROBABILITIES)\n metrics[logits_mean_key] = self._metric_spec(\n functools.partial(\n _predictions_streaming_mean, class_id=class_id),\n prediction_key.PredictionKey.LOGITS)\n metrics[auc_key] = self._metric_spec(\n functools.partial(\n _streaming_auc, class_id=class_id),\n prediction_key.PredictionKey.LOGITS)\n\n return metrics", "def merge_metric_stats(self, snapshot):\n\n if not self.__settings:\n return\n\n for key, other in six.iteritems(snapshot.__stats_table):\n stats = self.__stats_table.get(key)\n if not stats:\n self.__stats_table[key] = other\n else:\n stats.merge_stats(other)", "def collect_metrics() -> Tuple[Dict[str, Dict[str, Any]], Dict[str, List[str]]]:\n metric_docs: 
Dict[str, Dict[str, Any]] = {}\n metrics_by_integration: DefaultDict[str, List[str]] = defaultdict(list)\n # Reverse to keep backwards-compatible behavior with old script that kept\n # the last metric seen.\n for metric_yaml_file in sorted(INTEGRATIONS_PATH.glob(\"*/metrics.yaml\")):\n if \"Example\" in str(metric_yaml_file):\n continue\n\n for metric_name, metric in (yaml.safe_load(metric_yaml_file.read_text(encoding=\"utf-8\")) or {}).items():\n metrics_by_integration[metric_yaml_file.parent.name].append(metric_name)\n\n if metric_name in metric_docs:\n # print(f\"WARNING metric {metric_name} is duplicated, info will be taken from first one processed only\")\n continue\n\n desc = \"\"\n if \"description\" in metric:\n desc = metric[\"description\"]\n del metric[\"description\"]\n metric_docs[metric_name] = {\"yaml\": metric, \"markdown\": desc}\n return metric_docs, dict(metrics_by_integration)", "def compute_metrics(self, results: list) -> dict:", "def _aggregate_across_towers(metrics_collections, metric_value_fn, *args):\n def fn(distribution, *a):\n \"\"\"Call `metric_value_fn` in the correct control flow context.\"\"\"\n if hasattr(distribution, '_outer_control_flow_context'):\n # If there was an outer context captured before this method was called,\n # then we enter that context to create the metric value op. If the\n # caputred context is `None`, ops.control_dependencies(None) gives the\n # desired behavior. Else we use `Enter` and `Exit` to enter and exit the\n # captured context.\n # This special handling is needed because sometimes the metric is created\n # inside a while_loop (and perhaps a TPU rewrite context). But we don't\n # want the value op to be evaluated every step or on the TPU. So we\n # create it outside so that it can be evaluated at the end on the host,\n # once the update ops have been evaluted.\n\n # pylint: disable=protected-access\n if distribution._outer_control_flow_context is None:\n with tf.control_dependencies(None):\n metric_value = metric_value_fn(distribution, *a)\n else:\n distribution._outer_control_flow_context.Enter()\n metric_value = metric_value_fn(distribution, *a)\n distribution._outer_control_flow_context.Exit()\n # pylint: enable=protected-access\n else:\n metric_value = metric_value_fn(distribution, *a)\n if metrics_collections:\n tf.add_to_collections(metrics_collections, metric_value)\n return metric_value\n\n return distribution_strategy_context.get_tower_context().merge_call(fn, *args)", "def merge_accumulators(\n self,\n wrapper_accumulators: Iterable[WrapperAccumulator]) -> WrapperAccumulator:\n result = self.create_accumulator()\n for wrapper_accumulator in wrapper_accumulators:\n for feature_path, accumulator_for_feature in wrapper_accumulator.items():\n wrapped_accumulators = self._get_wrapped_accumulators(\n result, feature_path)\n for index, generator in enumerate(self._feature_stats_generators):\n wrapped_accumulators[index] = generator.merge_accumulators(\n [wrapped_accumulators[index], accumulator_for_feature[index]])\n return result", "def build_summary(self):\n for k, v in self.metrics.items():\n tf.summary.scalar(k, v)\n \n self.summary_op = tf.summary.merge_all()", "def _render_recent(self, metrics_config, requests):\n stats_dict = {}\n for metric_name, categorizer, metric, nested_config in metrics_config:\n # Metrics config can contain following types of items:\n # - str, None, callable(metric), None\n # - str, callable(categorizer), callable(metric), None\n # - str, callable(categorizer), None, nested config(tuple)\n\n if 
categorizer is None:\n # Compute single metric if key is str\n stats_dict[metric_name] = metric(requests)\n continue\n\n stats_dict[metric_name] = categories_stats = {}\n\n # Grouping requests by category\n grouped_by_category = defaultdict(list)\n for request_info in requests:\n category = categorizer(request_info)\n if category is HIDDEN_CATEGORY:\n continue\n grouped_by_category[category].append(request_info)\n\n if nested_config is None:\n # Compute single metric for each category\n for category, requests_group in grouped_by_category.items():\n categories_stats[category] = metric(requests_group)\n else:\n # Render nested stats for each category\n for category, requests_group in grouped_by_category.items():\n categories_stats[category] = self._render_recent(\n nested_config, requests_group\n )\n return stats_dict", "def get_metrics(cm, list_metrics):\n dic_metrics = {}\n total = np.sum(cm)\n\n if 'accuracy' in list_metrics:\n out = np.sum(np.diag(cm))\n dic_metrics['accuracy'] = out/total\n\n if 'pres_0' in list_metrics:\n num = cm[0, 0]\n den = cm[:, 0].sum()\n dic_metrics['pres_0'] = num/den if den > 0 else 0\n\n if 'pres_1' in list_metrics:\n num = cm[1, 1]\n den = cm[:, 1].sum()\n dic_metrics['pres_1'] = num/den if den > 0 else 0\n\n if 'recall_0' in list_metrics:\n num = cm[0, 0]\n den = cm[0, :].sum()\n dic_metrics['recall_0'] = num/den if den > 0 else 0\n\n if 'recall_1' in list_metrics:\n num = cm[1, 1]\n den = cm[1, :].sum()\n dic_metrics['recall_1'] = num/den if den > 0 else 0\n\n return dic_metrics", "def aggregate_statistics(self, new_stats):\n \n if isinstance(new_stats,RunStatistics):\n new_stats = [new_stats, ]\n elif isinstance(new_stats,list):\n if any(not isinstance(_,RunStatistics) for _ in new_stats):\n raise MadGraph5Error, \"The 'new_stats' argument of the function \"+\\\n \"'updtate_statistics' must be a (possibly list of) \"+\\\n \"RunStatistics instance.\"\n \n keys = set([])\n for stat in [self,]+new_stats:\n keys |= set(stat.keys())\n\n new_stats = new_stats+[self,]\n for key in keys:\n # Define special rules\n if key=='max_precision':\n # The minimal precision corresponds to the maximal value for PREC\n self[key] = min( _[key] for _ in new_stats if key in _)\n elif key=='min_precision':\n # The maximal precision corresponds to the minimal value for PREC\n self[key] = max( _[key] for _ in new_stats if key in _)\n elif key=='averaged_timing':\n n_madloop_calls = sum(_['n_madloop_calls'] for _ in new_stats if\n 'n_madloop_calls' in _)\n if n_madloop_calls > 0 :\n self[key] = sum(_[key]*_['n_madloop_calls'] for _ in \n new_stats if (key in _ and 'n_madloop_calls' in _) )/n_madloop_calls\n else:\n # Now assume all other quantities are cumulative\n self[key] = sum(_[key] for _ in new_stats if key in _)", "def get_metric_fns(self, metrics=None):\n metric_fn_dict = {\n \"auc\": lambda labels, pred_probs: 0.0\n if (labels.sum() == len(labels)) or (labels.sum() == 0)\n else roc_auc_score(labels, pred_probs),\n \"auprc\": average_precision_score,\n \"brier\": brier_score_loss,\n \"loss_bce\": log_loss,\n }\n if metrics is None:\n return metric_fn_dict\n else:\n return {\n key: value for key, value in metric_fn_dict.items() if key in metrics\n }", "def update_metrics(self, metrics, predictions, labels):\n return", "def compute_metrics(self):\n self.finalize_output_dict()\n self.metric_dict = {\n key: value(self.output_dict[\"labels\"], self.output_dict[\"pred_probs\"])\n for key, value in self.metric_fns.items()\n }", "def _get_metrics_to_collect(self, instance_key, 
additional_metrics):\n if instance_key not in self.metrics_to_collect_by_instance:\n self.metrics_to_collect_by_instance[instance_key] = \\\n self._build_metric_list_to_collect(additional_metrics)\n return self.metrics_to_collect_by_instance[instance_key]", "def plot_lc_many_metric(scores, outdir:Path, metrics:list=None, **plot_args):\n if metrics is None:\n metrics = scores['metric'].unique()\n\n for met in metrics:\n if met not in metrics:\n continue\n else:\n ax = plot_lc_single_metric(scores, metric_name=met, **plot_args)\n ax.legend(frameon=True, fontsize=10, loc='best')\n ax.grid(True)\n plt.tight_layout()\n plt.savefig(outdir/(f'lc.{met}.png'), dpi=200)\n del ax\n\n return None", "def _aggregate_across_towers(metrics_collections, metric_value_fn, *args):\n def fn(distribution, *a):\n \"\"\"Call `metric_value_fn` in the correct control flow context.\"\"\"\n if hasattr(distribution, '_outer_control_flow_context'):\n # If there was an outer context captured before this method was called,\n # then we enter that context to create the metric value op. If the\n # caputred context is `None`, ops.control_dependencies(None) gives the\n # desired behavior. Else we use `Enter` and `Exit` to enter and exit the\n # captured context.\n # This special handling is needed because sometimes the metric is created\n # inside a while_loop (and perhaps a TPU rewrite context). But we don't\n # want the value op to be evaluated every step or on the TPU. So we\n # create it outside so that it can be evaluated at the end on the host,\n # once the update ops have been evaluted.\n\n # pylint: disable=protected-access\n if distribution._outer_control_flow_context is None:\n with ops.control_dependencies(None):\n metric_value = metric_value_fn(distribution, *a)\n else:\n distribution._outer_control_flow_context.Enter()\n metric_value = metric_value_fn(distribution, *a)\n distribution._outer_control_flow_context.Exit()\n # pylint: enable=protected-access\n else:\n metric_value = metric_value_fn(distribution, *a)\n if metrics_collections:\n ops.add_to_collections(metrics_collections, metric_value)\n return metric_value\n\n return distribute_lib.get_tower_context().merge_call(fn, *args)", "def get_union_metrics(metric_a, metric_b):\n if metric_a is None and metric_b is None:\n return None\n elif metric_a is None:\n return metric_b\n elif metric_b is None:\n return metric_a\n else:\n # The order of metric_list need to be consistent among all hosts in distributed training\n # So we have metric_list sorted here.\n metric_list = sorted(list(set(metric_a).union(metric_b)))\n return metric_list", "def register_add_metric_callbacks(\n self, add_metrics_callbacks: List[types.AddMetricsCallbackType]) -> None:\n with self._graph.as_default():\n features_dict, predictions_dict, labels_dict = (\n self.get_features_predictions_labels_dicts())\n features_dict = util.wrap_tensor_or_dict_of_tensors_in_identity(\n features_dict)\n predictions_dict = util.wrap_tensor_or_dict_of_tensors_in_identity(\n predictions_dict)\n labels_dict = util.wrap_tensor_or_dict_of_tensors_in_identity(labels_dict)\n\n metric_ops = {}\n for add_metrics_callback in add_metrics_callbacks:\n new_metric_ops = add_metrics_callback(features_dict, predictions_dict,\n labels_dict)\n overlap = set(new_metric_ops) & set(metric_ops)\n if overlap:\n raise ValueError('metric keys should not conflict, but an '\n 'earlier callback already added the metrics '\n 'named %s' % overlap)\n metric_ops.update(new_metric_ops)\n self.register_additional_metric_ops(metric_ops)", 
"def get_all_metrics():\n return get_overlap_metrics() + get_distance_metrics() + get_distance_metrics()", "def get_multitask_metrics(metric_tasks = ()):\n\n @flax.struct.dataclass\n class MultiTaskMetric(metrics.Metric):\n \"\"\"MultiTaskMetric.\n\n This metric aggregates sub-metrics in the metric_dict and return the metrics\n of all of them by calling them separately.\n\n Attributes:\n tasks: A sequence of tasks to compute metrics over.\n \"\"\"\n tasks: Tasks = metric_tasks\n\n @classmethod\n @gin_utils.allow_remapping(name='get_multitask_metrics')\n def from_model_output(cls, outputs,\n labels):\n \"\"\"Accumulates model outputs for evaluation.\n\n Args:\n outputs: A dictionary with the following structure:\n key name: Task name.\n value content: A dictionary to corresponding task specific outputs.\n labels: A dictionary with the following structure:\n key name: Task name.\n value content: A dictionary corresponding task specific labels.\n\n Returns:\n A metric object initialized from the outputs and labels.\n\n Raises:\n KeyError: Missing task-specific outputs or labels.\n \"\"\"\n new_tasks = []\n for task in cls.tasks:\n task_outputs, task_labels = (\n task.filter_by_task(outputs), task.filter_by_task(labels))\n if not task_outputs:\n raise KeyError(f'No task outputs for task: {task.name}!')\n if task_labels is None:\n raise KeyError(f'No task labels for task: {task.name}!')\n\n metric = task.metric.from_model_output(task_outputs, task_labels)\n new_tasks.append(type(task)(metric=metric))\n\n return cls(tasks=new_tasks)\n\n def merge(self, other):\n new_tasks = []\n assert len(self.tasks) == len(other.tasks)\n for task, other_task in zip(self.tasks, other.tasks):\n metric = task.metric.merge(other_task.metric)\n new_tasks.append(type(task)(metric=metric))\n\n return type(self)(tasks=new_tasks)\n\n def reduce(self):\n new_tasks = []\n for task in self.tasks:\n metric = task.metric.reduce()\n new_tasks.append(type(task)(metric=metric))\n\n return type(self)(tasks=new_tasks)\n\n def compute(self):\n output_metric = {}\n for task in self.tasks:\n task_metric = task.metric.compute()\n output_metric.update(task.prepend_by_task(task_metric))\n\n return output_metric\n\n return MultiTaskMetric", "def get_all_metrics(self):\n metrics = {}\n for item in self.list_metrics():\n metric_name = item[2]\n metric = self.get_metric(\n item,\n existing_dict=metrics.get(metric_name, None))\n metrics[metric_name] = metric\n return metrics", "def build_metrics_gauge_data(gauge_metrics):\n return [{'name': name, 'value': value} for name, value in iteritems(gauge_metrics)]", "def merge_stats(self, other):\n\n self[0] += other[0]\n self[1] += other[1]\n self[2] += other[2]\n\n self[3] = ((self[0] or self[1] or self[2]) and\n min(self[3], other[3]) or other[3])\n self[4] = max(self[4], other[3])", "def record_custom_metric(self, name, value):\n if isinstance(value, dict):\n if len(value) == 1 and 'count' in value:\n new_stats = CountStats(call_count=value['count'])\n else:\n new_stats = TimeStats(*c2t(**value))\n else:\n new_stats = TimeStats(1, value, value, value, value, value**2)\n\n stats = self.__stats_table.get(name)\n if stats is None:\n self.__stats_table[name] = new_stats\n else:\n stats.merge_stats(new_stats)", "def parse_metrics(self, metrics):\n # type: (list) -> Tuple[List[OID], List[OID], List[OID], List[ParsedMetric]]\n # Use bulk for SNMP version > 1 only.\n bulk_threshold = self.bulk_threshold if self._auth_data.mpModel else 0\n result = parse_metrics(metrics, resolver=self._resolver, 
logger=self.logger(), bulk_threshold=bulk_threshold)\n return result['oids'], result['next_oids'], result['bulk_oids'], result['parsed_metrics']", "def log_metrics(self, metrics: dict):\n self.metrics.update(metrics)\n\n self._sync_log_event()", "def result(\n metrics: Dict[metric_types.MetricKey, Any]\n ) -> Dict[metric_types.MetricKey, Any]:\n matrix = metrics[matrices_metric_key]\n examples = metrics[examples_metric_key]\n\n output = {}\n for i, threshold in enumerate(matrix.thresholds):\n output[metric_key_by_name_by_threshold[threshold]\n ['positive_to_negative']] = matrix.fn[i]\n output[metric_key_by_name_by_threshold[threshold]\n ['negative_to_positive']] = matrix.fp[i]\n output[metric_key_by_name_by_threshold[threshold]\n ['positive_to_negative_examples_ids']] = np.array(\n examples.fn_examples[i])\n output[metric_key_by_name_by_threshold[threshold]\n ['negative_to_positive_examples_ids']] = np.array(\n examples.fp_examples[i])\n output[metric_key_by_name_by_threshold[threshold]\n ['positive_examples_count']] = matrix.fn[i] + matrix.tp[i]\n output[metric_key_by_name_by_threshold[threshold]\n ['negative_examples_count']] = matrix.fp[i] + matrix.tn[i]\n\n return output", "def collect_per_output_metric_info(metrics,\n output_names,\n output_shapes,\n loss_fns,\n from_serialized=False,\n is_weighted=False):\n if not metrics:\n return [{} for _ in output_names]\n\n if isinstance(metrics, list):\n any_sub_list = any(isinstance(m, list) for m in metrics)\n if any_sub_list:\n if len(metrics) != len(output_names):\n raise ValueError('When passing a list of lists as `metrics`, '\n 'it should have one entry per model output. '\n 'The model has ' + str(len(output_names)) +\n ' outputs, but you passed metrics=' + str(metrics))\n # User has provided a list of len = len(outputs).\n nested_metrics = [generic_utils.to_list(m) for m in metrics]\n else:\n # If it is a single list we then apply all metrics to all outputs.\n if len(output_names) > 1:\n nested_metrics = []\n for _ in output_names:\n nested_metrics.append(\n [metrics_module.clone_metric(m) for m in metrics])\n else:\n nested_metrics = [metrics]\n elif isinstance(metrics, collections.abc.Mapping):\n generic_utils.check_for_unexpected_keys('metrics', metrics, output_names)\n nested_metrics = []\n for name in output_names:\n output_metrics = generic_utils.to_list(metrics.get(name, []))\n nested_metrics.append(output_metrics)\n else:\n raise TypeError('Type of `metrics` argument not understood. '\n 'Expected a list or dictionary, found: ' + str(metrics))\n\n per_output_metrics = []\n for i, metrics in enumerate(nested_metrics):\n metrics_dict = collections.OrderedDict()\n for metric in metrics:\n metric_name = get_metric_name(metric, is_weighted)\n metric_fn = get_metric_function(\n metric, output_shape=output_shapes[i], loss_fn=loss_fns[i])\n metric_fn._from_serialized = from_serialized # pylint: disable=protected-access\n\n # If the metric function is not stateful, we create a stateful version.\n if not isinstance(metric_fn, metrics_module.Metric):\n metric_fn = metrics_module.MeanMetricWrapper(\n metric_fn, name=metric_name)\n # If the metric is being revived from something stateless, such as a\n # string (e.g. 
\"accuracy\"), we may need to later reapply transformations\n # such as renaming.\n metric_fn._from_serialized = False # pylint: disable=protected-access\n metrics_dict[metric_name] = metric_fn\n per_output_metrics.append(metrics_dict)\n\n return per_output_metrics", "def _dispatch_metrics(self, payload):\n for item in payload:\n try:\n self._ingest.send(gauges=item['gauges'], counters=item['counters'])\n except Exception as e:\n self._logger.error(\"Exception while sending payload to ingest : {0}\".format(e))", "def __init_metrics(self):\n\n batch = {}\n # split data into batches of size batch_size or less\n for metric_name, metric_pattern in self.metrics.items():\n # get the batch list for that metric\n batch_list = []\n for s in range(1, self.schema + 1):\n for t in range(1, self.table + 1):\n k = '/metrics/type=IndexTable/keyspace={}/scope={}/name={}/mean'.format(s, t, metric_name)\n # from Python 3.6 onwards, the standard dict type maintains insertion order by default\n batch[k] = 0\n # if the batch has batch_size items or at the end of iteration,\n # append the batch to list of that metric and create a new empty batch\n if len(batch) == self.batch_size or (s == self.schema and t == self.table):\n batch_list.append(batch)\n batch = {}\n\n # parse metric patterns\n l = metric_pattern.split()\n if l[0] == '(>':\n self.metrics[metric_name] = IncMetricStruct(float(int(l[1])), float(l[2][1:]), float(l[4][:-2]),\n batch_list)\n else:\n self.metrics[metric_name] = RandMetricStruct(float(l[0][1:]), float(l[-1][:-1]), batch_list)", "def weighted_average(metrics: List[Tuple[int, Metrics]]) -> Metrics:\n # Multiply accuracy of each client by number of examples used\n accuracies = [num_examples * m[\"accuracy\"] for num_examples, m in metrics]\n examples = [num_examples for num_examples, _ in metrics]\n\n # Aggregate and return custom metric (weighted average)\n return {\"accuracy\": sum(accuracies) / sum(examples)}", "def _default_metrics(self):\n metrics = {\n _summary_key(self.head_name, metric_key.MetricKey.LOSS):\n _weighted_average_loss_metric_spec(\n self._loss_fn, prediction_key.PredictionKey.LOGITS,\n self._label_name, self._weight_column_name)\n }\n\n # TODO(b/29366811): This currently results in both an \"accuracy\" and an\n # \"accuracy/threshold_0.500000_mean\" metric for binary classification.\n metrics[_summary_key(self.head_name, metric_key.MetricKey.ACCURACY)] = (\n metric_spec.MetricSpec(metrics_lib.streaming_accuracy,\n prediction_key.PredictionKey.CLASSES,\n self._label_name, self._weight_column_name))\n\n def _add_binary_metric(key, metric_fn):\n metrics[_summary_key(self.head_name, key)] = metric_spec.MetricSpec(\n metric_fn, prediction_key.PredictionKey.LOGISTIC, self._label_name,\n self._weight_column_name)\n\n _add_binary_metric(metric_key.MetricKey.PREDICTION_MEAN,\n _predictions_streaming_mean)\n _add_binary_metric(metric_key.MetricKey.LABEL_MEAN,\n _indicator_labels_streaming_mean)\n\n # Also include the streaming mean of the label as an accuracy baseline, as\n # a reminder to users.\n _add_binary_metric(metric_key.MetricKey.ACCURACY_BASELINE,\n _indicator_labels_streaming_mean)\n\n _add_binary_metric(metric_key.MetricKey.AUC, _streaming_auc)\n\n for threshold in self._thresholds:\n _add_binary_metric(metric_key.MetricKey.ACCURACY_MEAN % threshold,\n _accuracy_at_threshold(threshold))\n # Precision for positive examples.\n _add_binary_metric(\n metric_key.MetricKey.PRECISION_MEAN % threshold,\n _streaming_at_threshold(metrics_lib.streaming_precision_at_thresholds,\n 
threshold),)\n # Recall for positive examples.\n _add_binary_metric(metric_key.MetricKey.RECALL_MEAN % threshold,\n _streaming_at_threshold(\n metrics_lib.streaming_recall_at_thresholds,\n threshold))\n return metrics", "def aggregate_metrics(parent_dir, metrics):\r\n # Get the metrics for the folder if it has results from an experiment\r\n metrics_file = os.path.join(parent_dir, 'metrics_val_best_weights.json')\r\n if os.path.isfile(metrics_file):\r\n with open(metrics_file, 'r') as f:\r\n metrics[parent_dir] = json.load(f)\r\n\r\n # Check every subdirectory of parent_dir\r\n for subdir in os.listdir(parent_dir):\r\n if not os.path.isdir(os.path.join(parent_dir, subdir)):\r\n continue\r\n else:\r\n aggregate_metrics(os.path.join(parent_dir, subdir), metrics)", "def add_results(self, results):\n if self.replication_counter < self.replication_num:\n for metric in self.metrics:\n self.metric_final_results[metric].append(results[metric])\n\n self.replication_counter += 1\n else:\n raise Exception(\"The requested metric collection call of {}/{} exceeds the number of pre-defined replication\".format(self.replication_counter, self.replication_num))", "def changeMetrics(self, metrics):\n if isinstance(metrics,list) == False:\n metrics = [metrics]\n self.metrics = metrics\n\n whatMetrics = []\n\n for i in metrics:\n if i == RMSE:\n whatMetrics.append(\"RMSE\")\n elif i == f1Score:\n whatMetrics.append(\"f1Score\")\n elif i == recall:\n whatMetrics.append(\"recall\")\n elif i == precision:\n whatMetrics.append(\"precision\")\n elif i == mean_squared_error:\n whatMetrics.append(\"mean_squared_error\")\n elif i == mean_absolute_error:\n whatMetrics.append(\"mean_absolute_error\")\n elif i == mean_absolute_percentage_error:\n whatMetrics.append(\"mean_absolute_percentage_error\")\n elif isinstance(i,str):\n whatMetrics.append(i)\n else:\n print(\"I don't know what to do with : \" + str(i))\n\n self.metricsAsString = whatMetrics", "def prepare_multiple_perf_metrics(run_dict):\n multiple_perf_metrics = {}\n for run_label, run_name in run_dict.items():\n output_parser = OutputParser(run_name, use_most_recent=False)\n perf_metrics = performance_calculations.performance_metrics(output_parser)\n multiple_perf_metrics[run_label] = perf_metrics\n return multiple_perf_metrics", "def _average_training_metrics(\n self, per_batch_metrics: List[Dict[str, Any]]\n ) -> List[Dict[str, Any]]:\n check.true(self.hvd_config.use, \"Can only average training metrics in multi-GPU training.\")\n metrics_timeseries = util._list_to_dict(per_batch_metrics)\n\n # combined_timeseries is: dict[metric_name] -> 2d-array.\n # A measurement is accessed via combined_timeseries[metric_name][process_idx][batch_idx].\n combined_timeseries, _ = self._combine_metrics_across_processes(\n metrics_timeseries, num_batches=len(per_batch_metrics)\n )\n\n # If the value for a metric is a single-element array, the averaging process will\n # change that into just the element. 
We record what metrics are single-element arrays\n # so we can wrap them in an array later (for perfect compatibility with non-averaging\n # codepath).\n array_metrics = []\n for metric_name in per_batch_metrics[0].keys():\n if isinstance(per_batch_metrics[0][metric_name], np.ndarray):\n array_metrics.append(metric_name)\n\n if self.is_chief:\n combined_timeseries_type = Dict[str, List[List[Any]]]\n combined_timeseries = cast(combined_timeseries_type, combined_timeseries)\n num_batches = len(per_batch_metrics)\n num_processes = hvd.size()\n averaged_metrics_timeseries = {} # type: Dict[str, List]\n\n for metric_name in combined_timeseries.keys():\n averaged_metrics_timeseries[metric_name] = []\n for batch_idx in range(num_batches):\n batch = [\n combined_timeseries[metric_name][process_idx][batch_idx]\n for process_idx in range(num_processes)\n ]\n\n np_batch = np.array(batch)\n batch_avg = np.mean(np_batch[np_batch != None]) # noqa: E711\n if metric_name in array_metrics:\n batch_avg = np.array(batch_avg)\n averaged_metrics_timeseries[metric_name].append(batch_avg)\n per_batch_metrics = util._dict_to_list(averaged_metrics_timeseries)\n return per_batch_metrics", "def get_min_or_max_values(metrics: dict, global_metrics: dict, fn2) -> dict:\n for ds_name in metrics:\n if ds_name not in global_metrics:\n global_metrics[ds_name] = {}\n\n feature_metrics = metrics[ds_name]\n for feature_name in feature_metrics:\n if feature_name not in global_metrics[ds_name]:\n global_metrics[ds_name][feature_name] = feature_metrics[feature_name]\n else:\n global_metrics[ds_name][feature_name] = fn2(\n global_metrics[ds_name][feature_name], feature_metrics[feature_name]\n )\n\n results = {}\n for ds_name in global_metrics:\n for feature_name in global_metrics[ds_name]:\n if feature_name not in results:\n results[feature_name] = global_metrics[ds_name][feature_name]\n else:\n results[feature_name] = fn2(results[feature_name], global_metrics[ds_name][feature_name])\n\n for ds_name in global_metrics:\n for feature_name in global_metrics[ds_name]:\n global_metrics[ds_name][feature_name] = results[feature_name]\n\n return global_metrics", "def handle_metrics(split: str, metrics: Dict[str, Union[int, float]], output_dir: str):\n\n logger.info(f\"***** {split} metrics *****\")\n for key in sorted(metrics.keys()):\n value = metrics[key]\n if isinstance(value, float):\n value = round(value, 4)\n logger.info(f\" {key} = {value}\")\n save_json(metrics, os.path.join(output_dir, f\"{split}_results.json\"))", "def _update_stats(stats, train_loss=None, train_accuracy=None, test_loss=None, test_accuracy=None,\n test_confusion_matrix=None):\n if train_loss:\n stats['train_loss'].append(train_loss)\n if train_accuracy:\n stats['train_accuracy'].append(train_accuracy)\n if test_loss:\n stats['test_loss'].append(test_loss)\n if test_accuracy:\n stats['test_accuracy'].append(test_accuracy)\n if test_confusion_matrix is not None:\n stats['test_confusion_matrix'].append(test_confusion_matrix)\n\n return stats", "def sum_dstats(self, stats, smetrics):\n avg = {}\n\n for disk, metrics in stats.iteritems():\n for mname, metric in metrics.iteritems():\n if mname not in smetrics:\n continue\n if mname in avg:\n avg[mname] += metric\n else:\n avg[mname] = metric\n\n return avg", "def _log_metrics(self, logs, prefix, step):\r\n if logs is None:\r\n logs = {}\r\n\r\n # Group metrics by the name of their associated file writer. 
Values\r\n # are lists of metrics, as (name, scalar_value) pairs.\r\n logs_by_writer = {\r\n self._train_run_name: [],\r\n self._validation_run_name: [],\r\n }\r\n validation_prefix = 'val_'\r\n for (name, value) in logs.items():\r\n if name in ('batch', 'size', 'num_steps'):\r\n # Scrub non-metric items.\r\n continue\r\n if name.startswith(validation_prefix):\r\n name = name[len(validation_prefix):]\r\n writer_name = self._validation_run_name\r\n else:\r\n writer_name = self._train_run_name\r\n name = prefix + name # assign batch or epoch prefix\r\n logs_by_writer[writer_name].append((name, value))\r\n\r\n with context.eager_mode():\r\n with summary_ops_v2.always_record_summaries():\r\n for writer_name in logs_by_writer:\r\n these_logs = logs_by_writer[writer_name]\r\n if not these_logs:\r\n # Don't create a \"validation\" events file if we don't\r\n # actually have any validation data.\r\n continue\r\n writer = self._get_writer(writer_name)\r\n with writer.as_default():\r\n for (name, value) in these_logs:\r\n summary_ops_v2.scalar(name, value, step=step)", "def to_metric(self):\r\n if self.units != 'metric':\r\n self.units = 'metric'\r\n for statement in self.statements:\r\n statement.to_metric()\r\n for tool in iter(self.tools.values()):\r\n tool.to_metric()\r\n for primitive in self.primitives:\r\n primitive.to_metric()\r\n for hit in self.hits:\r\n hit.to_metric()", "def initialize_metrics():\n metrics = {\n 'cd_losses': [],\n 'cd_corrects': [],\n 'cd_precisions': [],\n 'cd_recalls': [],\n 'cd_f1scores': [],\n }\n\n return metrics", "def metrics(self):\n \n if self.mse.shape[0]>1:\n raise ValueError('Metrics can only handle single observations.')\n \n if self.N==1:\n pred = float('nan')\n err = float('nan')\n y_true = float('nan')\n else:\n pred = int(self._predictions[-1])\n err = self._mse[-1]\n y_true = int(self.label[0])\n \n is_outlier = {\"type\":\"GAUGE\",\"key\":\"is_outlier\",\"value\":pred}\n mse = {\"type\":\"GAUGE\",\"key\":\"mse\",\"value\":err}\n obs = {\"type\":\"GAUGE\",\"key\":\"observation\",\"value\":self.N - 1}\n threshold = {\"type\":\"GAUGE\",\"key\":\"threshold\",\"value\":self.threshold}\n \n label = {\"type\":\"GAUGE\",\"key\":\"label\",\"value\":y_true}\n \n accuracy_tot = {\"type\":\"GAUGE\",\"key\":\"accuracy_tot\",\"value\":self.metric[4]}\n precision_tot = {\"type\":\"GAUGE\",\"key\":\"precision_tot\",\"value\":self.metric[5]}\n recall_tot = {\"type\":\"GAUGE\",\"key\":\"recall_tot\",\"value\":self.metric[6]}\n f1_score_tot = {\"type\":\"GAUGE\",\"key\":\"f1_tot\",\"value\":self.metric[7]}\n f2_score_tot = {\"type\":\"GAUGE\",\"key\":\"f2_tot\",\"value\":self.metric[8]}\n \n accuracy_roll = {\"type\":\"GAUGE\",\"key\":\"accuracy_roll\",\"value\":self.metric[9]}\n precision_roll = {\"type\":\"GAUGE\",\"key\":\"precision_roll\",\"value\":self.metric[10]}\n recall_roll = {\"type\":\"GAUGE\",\"key\":\"recall_roll\",\"value\":self.metric[11]}\n f1_score_roll = {\"type\":\"GAUGE\",\"key\":\"f1_roll\",\"value\":self.metric[12]}\n f2_score_roll = {\"type\":\"GAUGE\",\"key\":\"f2_roll\",\"value\":self.metric[13]}\n \n true_negative = {\"type\":\"GAUGE\",\"key\":\"true_negative\",\"value\":self.metric[0]}\n false_positive = {\"type\":\"GAUGE\",\"key\":\"false_positive\",\"value\":self.metric[1]}\n false_negative = {\"type\":\"GAUGE\",\"key\":\"false_negative\",\"value\":self.metric[2]}\n true_positive = {\"type\":\"GAUGE\",\"key\":\"true_positive\",\"value\":self.metric[3]}\n \n nb_outliers_roll = 
{\"type\":\"GAUGE\",\"key\":\"nb_outliers_roll\",\"value\":self.metric[14]}\n nb_labels_roll = {\"type\":\"GAUGE\",\"key\":\"nb_labels_roll\",\"value\":self.metric[15]}\n nb_outliers_tot = {\"type\":\"GAUGE\",\"key\":\"nb_outliers_tot\",\"value\":self.metric[16]}\n nb_labels_tot = {\"type\":\"GAUGE\",\"key\":\"nb_labels_tot\",\"value\":self.metric[17]}\n \n return [is_outlier,mse,obs,threshold,label,\n accuracy_tot,precision_tot,recall_tot,f1_score_tot,f2_score_tot,\n accuracy_roll,precision_roll,recall_roll,f1_score_roll,f2_score_roll,\n true_negative,false_positive,false_negative,true_positive,\n nb_outliers_roll,nb_labels_roll,nb_outliers_tot,nb_labels_tot]", "def update(self, data: Mapping[str, np.ndarray]) -> Self:\n\n for metric in self.metrics:\n metric.update(data)\n\n return self", "def update(self, outputs: torch.Tensor, targets: torch.Tensor) -> Tuple[Any, Any, Any, Any]:\n tn, fp, fn, tp, support = super().update(outputs=outputs, targets=targets)\n per_class, micro, macro, weighted = get_aggregated_metrics(\n tp=tp, fp=fp, fn=fn, support=support, zero_division=self.zero_division,\n )\n return per_class, micro, macro, weighted", "def reduce_metrics(logging_outputs) -> None:\n loss_sum = sum(log.get('loss', 0) for log in logging_outputs)\n admloss_sum = sum(log.get('admloss', 0) for log in logging_outputs)\n margin_n = sum(log.get('margin_n', 0) for log in logging_outputs)\n sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)\n nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)\n\n metrics.log_scalar('loss', loss_sum / sample_size / math.log(2), sample_size, round=3)\n metrics.log_scalar('admloss', admloss_sum / sample_size / math.log(2), sample_size, round=3)\n metrics.log_scalar('margin_norm', margin_n / nsentences, 32, round=3)\n metrics.log_derived('ppl', lambda meters: round(2**meters['loss'].avg, 3))", "def reduce_metrics(logging_outputs) -> None:\n loss_sum = sum(log.get('loss', 0) for log in logging_outputs)\n neg_elbo_sum = sum(log.get('neg_elbo', 0) for log in logging_outputs)\n recon_loss_sum = sum(log.get('recon_loss', 0) for log in logging_outputs)\n ntokens = sum(log.get('ntokens', 0) for log in logging_outputs)\n sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)\n nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)\n KLz_sum = sum(log.get('KLz', 0) for log in logging_outputs)\n KLt_sum = sum(log.get('KLt', 0) for log in logging_outputs)\n KLtheta_sum = sum(log.get('KLtheta', 0) for log in logging_outputs)\n\n if 'nll_iw' in logging_outputs[0]:\n nll_iw_sum = sum(log.get('nll_iw', 0) for log in logging_outputs)\n metrics.log_scalar('nll_iw_s', nll_iw_sum / nsentences, \n nsentences, round=3, priority=4)\n metrics.log_scalar('nll_iw_t', nll_iw_sum / ntokens / math.log(2), \n ntokens, round=3, priority=5) \n metrics.log_derived('ppl_iw', lambda meters: utils.get_perplexity(meters['nll_iw_t'].avg), priority=6)\n\n else:\n metrics.log_scalar('loss', loss_sum / sample_size / math.log(2), \n sample_size, round=3, priority=3)\n\n metrics.log_scalar('neg_elbo_s', neg_elbo_sum / nsentences, \n nsentences, round=3, priority=4)\n metrics.log_scalar('recon_loss_s', recon_loss_sum / nsentences, \n nsentences, round=3, priority=4)\n\n metrics.log_scalar('neg_elbo_t', neg_elbo_sum / ntokens / math.log(2), \n ntokens, round=3, priority=5)\n metrics.log_scalar('recon_loss_t', recon_loss_sum / ntokens / math.log(2), \n ntokens, round=3, priority=5)\n\n metrics.log_scalar('KLz', KLz_sum / nsentences, 
nsentences, round=1, priority=8)\n metrics.log_scalar('KLt', KLt_sum / nsentences, nsentences, round=1, priority=8)\n metrics.log_scalar('KLtheta', KLtheta_sum / nsentences, nsentences, round=1, priority=8)\n\n metrics.log_derived('ppl', lambda meters: utils.get_perplexity(meters['neg_elbo_t'].avg), priority=6)\n metrics.log_derived('recon_ppl', lambda meters: utils.get_perplexity(meters['recon_loss_t'].avg), priority=7)\n\n if 'active' in logging_outputs[0]:\n metrics.log_scalar('active', logging_outputs[0]['active'], weight=0, round=1, priority=10)\n metrics.log_scalar('percent', logging_outputs[0]['percent'], weight=0, round=2, priority=10)\n # metrics.log_scalar('nlow', logging_outputs[0]['nlow'], weight=0, priority=10)\n # metrics.log_scalar('nhigh', logging_outputs[0]['nhigh'], weight=0, priority=10)", "def _aggregate_perf_data(perf_all_ordinals: List[str]):\n aggregate = {}\n\n pd = PerfData()\n for data in perf_all_ordinals:\n worker_pd = PerfData(**json.loads(data))\n if len(perf_all_ordinals) > 1:\n aggregate.setdefault(\"ordinals\", [])\n aggregate[\"ordinals\"].append(worker_pd.throughput_dict())\n\n pd.merge(worker_pd)\n\n aggregate.update(dataclasses.asdict(pd))\n return aggregate", "def __init__(\n self, config: Config, metrics: Union[Metric, Iterable[Metric]]\n ) -> None:\n self._metrics: Dict[Metric, Optional[float]] = {}\n\n if isinstance(metrics, Metric):\n self._metrics[metrics] = None\n else:\n for key in metrics:\n self._metrics[key] = None\n self._metric_funcs = {\n Metric.ACCURACY: accuracy,\n Metric.RECALL: recall,\n Metric.PRECISION: precision,\n Metric.F1: f_1,\n Metric.CONSISTENCY: consistency,\n }\n self._config = config\n self._ids: List[str] = []\n self._preds: List[str] = []\n self._targets: List[str] = []", "def process_metrics_overall(\n self, the_dict, names=[\"metric\", \"phase\", \"epoch\", \"performance\"]\n ):\n result = (\n pd.DataFrame(the_dict)\n .reset_index()\n .melt(id_vars=\"index\")\n .set_index([\"index\", \"variable\"])\n .value.apply(pd.Series)\n .stack()\n .reset_index()\n )\n result.columns = names\n return result", "def calc_metrics(data, sampled_data_list, dataset_type):\n result={}\n for sampled_data in sampled_data_list:\n c2st_roc_auc_metric = c2st_roc_auc(data, sampled_data)\n if \"c2st_roc_auc\" in result:\n result[\"c2st_roc_auc\"].append(c2st_roc_auc_metric)\n else:\n result[\"c2st_roc_auc\"] = [c2st_roc_auc_metric]\n mmd_p_val, mmd_stat = rbf_mmd_test(data.values, sampled_data.values)\n if \"mmd_p_val\" in result:\n result[\"mmd_p_val\"].append(mmd_p_val)\n result[\"mmd_stat\"].append(mmd_stat)\n else:\n result[\"mmd_p_val\"] = [mmd_p_val]\n result[\"mmd_stat\"] = [mmd_stat]\n ks_p_val, ks_stat, ks_n, ks_p_val_list, ks_stat_list = ks_test(data, sampled_data)\n if dataset_type != \"norm_dataset\":\n ks_p_val = ks_permutation(ks_stat_list, data, sampled_data)\n if \"ks_p_val\" in result:\n result[\"ks_p_val\"].append(ks_p_val)\n result[\"ks_stat\"].append(ks_stat)\n else:\n result[\"ks_p_val\"] = [ks_p_val]\n result[\"ks_stat\"] = [ks_stat]\n acc_r, acc_g = c2st_accuracy(data, sampled_data)\n if \"c2st_acc_r\" in result:\n result[\"c2st_acc_r\"].append(acc_r)\n result[\"c2st_acc_g\"].append(acc_g)\n else:\n result[\"c2st_acc_r\"] = [acc_r]\n result[\"c2st_acc_g\"] = [acc_g]\n return result", "def sum_activity_metrics(activityDict, metricNames, activityName=\"main_thread\"):\n assert isinstance(activityDict, dict)\n \n return [sum(x) for x in zip(\n *(get_activity_samples(activityDict, metricNames, activityName).values()))]" ]
[ "0.67883134", "0.6653943", "0.66358435", "0.65713274", "0.65646595", "0.64966273", "0.64369106", "0.6254954", "0.6035345", "0.6030221", "0.5992154", "0.5904505", "0.58831435", "0.5876968", "0.58738685", "0.586156", "0.58568037", "0.58403516", "0.58366096", "0.5835867", "0.58253866", "0.5809571", "0.57690775", "0.57638687", "0.5761877", "0.57279545", "0.5714877", "0.56369394", "0.56266624", "0.5622822", "0.56204945", "0.5608718", "0.5607718", "0.55972373", "0.55972147", "0.5545563", "0.5544569", "0.55343926", "0.55327487", "0.5529173", "0.5505724", "0.5497596", "0.546907", "0.54686296", "0.5461987", "0.5443107", "0.5434074", "0.5432652", "0.54233927", "0.5387952", "0.53673154", "0.53638667", "0.53550106", "0.5340117", "0.53376645", "0.5329426", "0.53283525", "0.53272", "0.5322408", "0.5310111", "0.52973807", "0.52921325", "0.52913404", "0.52897656", "0.52773666", "0.52765054", "0.5271119", "0.525196", "0.52438164", "0.52407867", "0.5237594", "0.5235343", "0.5223238", "0.5217733", "0.5211706", "0.520698", "0.5203866", "0.5193307", "0.5191346", "0.5187341", "0.51852566", "0.5183477", "0.517767", "0.51668996", "0.51647294", "0.5158489", "0.5147805", "0.51457065", "0.51394147", "0.5134651", "0.5132064", "0.5129294", "0.51033866", "0.51017195", "0.5098655", "0.5095307", "0.5093319", "0.5091702", "0.5090645", "0.5089108" ]
0.8258332
0
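The merge_stats and record_custom_metric negatives above both treat a metric record as a small list of accumulators that can be combined field-by-field. A minimal, self-contained Python sketch of that pattern follows; the five-field layout [call_count, total_time, exclusive_time, min_time, max_time] and the TimeStats name are illustrative assumptions, not any agent's actual schema.

class TimeStats(list):
    # List-backed stats record: [call_count, total, exclusive, min, max].
    # Merging sums the accumulators and widens the min/max envelope.

    def merge_stats(self, other):
        had_data = self[0] > 0            # remember state before accumulating
        self[0] += other[0]               # call count
        self[1] += other[1]               # total duration
        self[2] += other[2]               # exclusive duration
        self[3] = min(self[3], other[3]) if had_data else other[3]
        self[4] = max(self[4], other[4])

a = TimeStats([2, 0.30, 0.25, 0.10, 0.20])
a.merge_stats(TimeStats([1, 0.05, 0.05, 0.05, 0.05]))
print(a)  # -> [3, 0.35, 0.3, 0.05, 0.2]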
Checks if the player is ready to be rendered on the character sheet
def is_player_ready(self): player = self.base.game_instance['player_ref'] if (player and base.player_states["is_alive"] and base.player_states["is_idle"] and not base.player_states["is_moving"] and not base.player_states["is_running"] and not base.player_states["is_crouch_moving"] and not base.player_states["is_crouching"] and not base.player_states["is_standing"] and not base.player_states["is_jumping"] and not base.player_states["is_h_kicking"] and not base.player_states["is_f_kicking"] and not base.player_states["is_using"] and not base.player_states["is_attacked"] and not base.player_states["is_busy"] and not base.player_states["is_turning"] and not base.player_states["is_mounted"] and not base.player_states["horse_riding"] and not self.base.game_instance["is_player_sitting"] and not player.get_python_tag("is_on_horse") ): return True else: return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_ready(self):\n if self.game.has_started():\n return True\n return self.status == self.PLAYER_READY", "def ready(self):\n return self.shader is not None and self.texturesReady()", "def check_ready(self):\r\n print \"Checking ready\"\r\n\t\tif self.game.trough.is_full():\r\n print \"Ready\"\r\n\t\t\tself.ready()\r\n\t\t\treturn True\r\n\t\tprint \"Not Ready\"\r\n\t\treturn False", "def set_ready(self):\n if self.game.has_started() or self.status == self.PLAYER_READY:\n return\n self.status = self.PLAYER_READY\n self.game.player_is_ready()", "def start_game_check(self):\n if len(self.pending_players) > 0:\n return False\n else:\n return True", "def enough_players():\n return True", "def requestReady(self):\n if self.team[self.team_num][self.map_pos].avatarLabel['text'] == \"\":\n return;\n \n if self.isHost:\n obj = {\"worldName\":self.worldInfo.worldName}\n main.cManager.sendRequest(Constants.CMSG_START_TO_READY_GAME, obj)\n \n else:\n obj ={\"worldName\": self.worldInfo.worldName}\n main.cManager.sendRequest(Constants.CMSG_READY, obj)\n self.isReady = 1", "def is_ready() -> bool:\n return True", "def check_for_tie(self):\n\n\t\tif len(self.player_model.available_cells) > 0:\n\t\t\treturn False\n\t\telse:\n\t\t\treturn True", "def is_ready(self) -> bool:\n pass", "def wait_to_play(self):\n\n\t\tself.player_model.current_player = self.player_model.rival_player\n\t\tself.player_frame.prepare_to_wait_turn(self.player_model.rival_player.name, self.player_model.available_cells)", "def can_play(self) -> bool:\n purple_card = self.game.board.purple\n return (\n self.game.current_player != self\n and purple_card is not None\n and purple_card.space > len(self.game.board.yellow[self])\n )", "def set_not_ready(self):\n if self.game.has_started() or self.status == self.PLAYER_NOT_READY:\n return\n self.status = self.PLAYER_NOT_READY", "def won_game(self):\n for player in self.players:\n if len(player.cards) == 0:\n\n return True\n return False", "def texturesReady(self):\n return (self.modulateTexture is not None and\n self.clipTexture is not None and\n self.colourTexture is not None and\n self.modulateTexture.ready() and\n self.clipTexture .ready() and\n self.colourTexture .ready())", "def is_ready(cls):\n\n return False", "def is_ready(self):\n return (self.is_calibrated() and not self.has_error()\n and not self.is_moving())", "def still_valid(self) -> bool:\n return self._data.player_alive(self._data.player_turn)", "def check_win(self):\n return UNEXPOSED not in self.get_game() and self.get_game().count(FLAG) == len(self.get_pokemon_location)", "def ready(self):\n return len(self.player1.ships) == len(\n self.player2.ships) == len(self.SHIP_INFO)", "def won(self):\n if self.current_room.name == \"Victory\":\n return True\n else:\n return False", "def ready(self):\n return self.time >= self.cooldown", "def _check_for_win(self):\n slots_available = any(\n [slot.available for slot in self.board.iter_slots() if not slot.mine]\n )\n if not slots_available:\n self.status = GameStatusEnum.won\n self.end_time = datetime.utcnow()", "def is_game_won(self):\n return True", "def is_ready_update(self):\n raise UnityTrainerException(\"The is_ready_update method was not implemented.\")", "def ready(self):\n return self.counter > 0", "def check_if_won(self):\n if self.player_points > self.enemy_points:\n self.bHasWon = True\n else:\n self.bHasWon = False", "def CheckVictoryCondition(self):\n opponentVictory = True\n for char in self.screen.characters:\n if char.team == 1 and char.leader and not 
char.dead:\n opponentVictory = False\n if opponentVictory:\n self.screen.refresh()\n self.music.stop()\n sys.exit()\n\n for victory in self.victories:\n playerVictory = True\n nextLevel = victory['next_level']\n if victory['condition'] == 'destroy':\n for char in self.screen.characters:\n if not char.dead and char.team == 2:\n playerVictory = False\n elif victory['condition'] == 'kill leaders':\n for char in self.screen.characters:\n if not char.dead and char.team == 2 and char.leader:\n playerVictory = False\n if playerVictory:\n print('You win')\n if self.music:\n self.music.stop()\n self.screen.objects = []\n self.screen.tileEffects = []\n self = Level(self.screen, nextLevel)", "def isReady(self):\n return self._lowLevelIsReady()", "def maybe_start(self):\r\n\t\tif not [p for p in self.players if not p.ready]\\\r\n\t\t and len(self.players) == self.max_players \\\r\n\t\t and not self.started:\r\n\t\t\tself.start()", "def is_game_won(self):\n if self.game_is_tied():\n return False\n my_available_steps = self.steps_available(self.loc)\n opp_available_steps = self.steps_available(self.opponent_loc)\n if my_available_steps == 0 or opp_available_steps == 0:\n return True\n else:\n return False", "def has_won(board, player):\r\n return False", "def playable(self, cell):\n \n return self.cells[cell] is None", "async def on_world_start(self, data, connection):\n player = self.plugins['player_manager'].get_player_by_name(\n connection.player.name)\n if hasattr(player, 'seen_before'):\n return True\n else:\n self.background(self._new_player_greeter(connection))\n self.background(self._new_player_gifter(connection))\n player.seen_before = True\n return True", "def check_trying_using(self):\r\n if self.opportunity or 'key' in inventory:\r\n if self.rect.colliderect(player):\r\n music_acceptor.usingPortalSound()\r\n player.rect.x = random.randrange(75, WIDTH - 125)\r\n player.rect.y = random.randrange(25, HEIGHT - 100)", "def is_editor_ready(self):\r\n if self.editor_widget:\r\n window = self.editor_widget.window()\r\n if hasattr(window, 'is_starting_up') and not window.is_starting_up:\r\n return True", "def isLoaded(self):\n if self.video == None:\n return False\n return True", "def is_game_win(self):\n return not self.deck and not self.hand", "def is_on(self):\n return not self.ready", "def is_ready(self) -> bool:\n return self.build_progress == 1.0", "def is_ready(self):\n return self._is_ready()", "def issolved(self) -> bool:\n if not self._pile:\n return True\n for c_card in self._pile:\n if not c_card.visible:\n return False\n return True", "def is_ready(self):\n return self.__is_ready", "def has_won(board, player):\n return False", "def ready(self):\n return not self._wanted", "def ready(self):\n return True", "def runRequirements(self):\n ready = (self.user[\"Save\"] != \"\" and self.user[\"Video\"] != \"\") or self.img_exist\n return ready", "def valid(self):\n self.ships()\n self.height()\n self.width()\n self.first()\n self.ai()\n self.ui()", "def check_end(self, player):\n if all(tile.player==player for tile in board):\n board.draw()\n print(nl, self.winmsg % player)\n sys.exit()", "def __game_is_over(self):\n return not (self.__playing and self.__bricks_total > 0 and self.__num_lives > 0)", "def is_ready(self) -> bool:\n return self._ready.is_set()", "def check_for_game_won(self):\n all_moscuvites_captured = True\n king_captured = True\n king_escaped = True\n for piece in self.game_pieces:\n if piece.player == 2:\n all_moscuvites_captured = False\n elif piece.player == 3:\n 
king_captured = False\n king_coords = (piece.x,piece.y)\n escape_coords = [(0, 0), (0, 8),\n (8, 0), (8, 8)]\n if king_coords not in escape_coords:\n king_escaped = False\n if king_captured:\n return 2\n elif king_escaped or all_moscuvites_captured:\n return 1\n else:\n return 0", "def _can_render_now(self):\n # First check that no update events are pending.\n window = self._window\n if window._transition:\n return 0\n rgn = Qd.NewRgn()\n window._onscreen_wid.GetWindowUpdateRgn(rgn)\n ok = Qd.EmptyRgn(rgn)\n # Next check that we're topmost\n if ok:\n ok = window._is_on_top()\n Qd.DisposeRgn(rgn)\n return ok", "def player(self):\n legal = self.board.legal_move(self.black)\n if(len(legal) == 0):\n self.p_no_move = 1\n print(\"No legal move for player!\")\n self.computer_turn = True\n self.player_turn = False", "def board_tiles_availability(self):\n for row in range(GameData.rows):\n for col in range(GameData.columns):\n if self.board[row][col] == 0:\n return False\n # Game is draw, no more moves left!\n return True", "def is_ready(self):\n return self._is_ready", "def checkPlayerSelection(self):\n starting = True\n for button in self.model.buttons.sprites():\n if button.rect.collidepoint(mouse.get_pos()) and mouse.get_pressed()[0]:\n if button.function == \"1P\":\n self.model.playernum = 1\n starting = False\n else:\n self.model.playernum = 2\n starting = False\n\n\n for event in pygame.event.get():\n if event.type == KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n return \"Quit\"\n\n return starting", "def player_collision(self, player):\n return True", "def IsReady(self):\r\n\t\treturn self._get_attribute('isReady')", "def game_tie(self):\n\n shape = self.board.shape\n if np.count_nonzero(self.board) == (shape[0] * shape[1]):\n # The board is full\n player = 0\n return True\n else:\n return False", "def available(self):\n\t\t\treturn False", "def available(self):\n\t\t\treturn False", "def available(self):\n\t\t\treturn False", "def available(self):\n\t\t\treturn True", "def available(self):\n\t\t\treturn True", "def available(self):\n\t\t\treturn True", "def is_ready_for_website(self):\n return self.title != \"\" and self.poster_image_url != \"\" and self.trailer_youtube_url != \"\"", "def has_won(self):\n return len(self.hand) == 0", "def is_character_alive(self):\n return self.get_model.get_character.alive", "def if_ready(self, **kwargs):\n return True", "def updateComplete(self):\n self.livesScreen()\n if self.getWave().getLives() == 0:\n self.deathScreen()\n else:\n self.winScreen()", "def isReady(self):\n return self._state in self._ReadyStates", "def is_ready(self):\n return self.prep_job.is_done()", "def is_ready(self):\n return len(self.unresolved_placeholders) == 0", "def is_ready(self, chunk_i):\n return True", "def is_ready(self):\n ready = True\n for browser in self.browsers:\n if 'exe' in self.browsers[browser]:\n exe = self.browsers[browser]['exe']\n if not os.path.isfile(exe):\n logging.critical(\"Browser executable is missing for %s: '%s'\", browser, exe)\n ready = False\n return ready", "def isReady(self):\n\t\twhile self.osc.trigger_state() != \"save\":\n\t\t\ttime.sleep(.1)\n\t\treturn True", "def set_player_ready(self, player_id):\n player = list(filter(lambda p: p.id() == player_id, self.players))\n\n if len(player):\n player[0].ready = True", "def game_on(self):\n doc = self.documentation\n return (self.draw.accepted or doc[len(doc)-1].accepted) and (self.board.stones_set < self.board.max_nr_stones) and (self.board.score[opponent(self.draw.player)] > 0)", "def 
outOfScreen(self):\n x,y = self.currentLevel.transformToScreenCoordinate(self.position)\n w,h = cblocals.GAME_SCREEN_SIZE\n if x<0 or y<0 or x>x or y>h:\n return True\n return False", "def checkGameComplete(self):\n for rowKey in self.table:\n for ele in self.table[rowKey]:\n if type(ele) == int:\n return False # means not complete\n return True", "def isGameOver(self):\n for i in range(self.rows):\n for j in range(self.columns):\n if self.grid[i][j].face == 'down':\n return False\n #if here then all cards must be face up\n return True", "def is_ready(self):\n if self.producer is None:\n return False\n return True", "def ready(self):\n if self.status == self.STATUS_NEED_FORCED:\n return True\n elif self.airdate and self.status in (self.STATUS_NEED, self.STATUS_NONE):\n return self.aired and not self.obsolete and self.season.number != 0\n else:\n return False", "def is_game_over(self):\n\n if len(self.next_pieces) == 0:\n return True", "def check_game_over(self):\n for piece in self.pieces:\n if not piece.destroyed:\n return False\n print(\"Signal.END\")\n return True", "def window_ready(self):\n raise NotImplementedError", "def is_complete(self):\n return self.winner is not None", "def __collision_sprite(self) -> bool:\n if pygame.sprite.spritecollide(self.player.sprite,self.pipes,False):\n return True\n else: \n return False", "def isOpen(self):\n\t\treturn not self.endgame", "def check_for_end_of_game(self):\n return self.player_1.score + self.player_2.score >= self.number_of_cells", "def isstart(self) -> bool:\n if len(self._pile) != self._pos + 1:\n return False\n visible_count = 0\n hidden_count = 0\n for c_card in self._pile:\n if c_card.visible:\n visible_count += 1\n else:\n hidden_count += 1\n return hidden_count == self._pos and visible_count == 1", "def wait_until_ready(self):\n while not self.is_ready():\n time.sleep(0.01)", "def player_win(self):\n global chips\n global placed_bet\n\n chips = (self.final_bet*2 + chips)\n self.victory = True\n placed_bet = False", "def game_won(self):\n return all((foundation.is_full() for foundation in self.foundations.values()))", "def available(self) -> bool:\n return True", "def available(self) -> bool:\n return True", "def playerCanPlay(game, situation, player):\r\n return True", "def is_full(self):\n return set(self._parent.letters()) == set(self.winners())", "def is_readytoserve(self):\n isreadytoserve = True\n if (not self.comp('packmanager').is_readytoserve()):\n isreadytoserve = False\n return isreadytoserve" ]
[ "0.69993716", "0.6624655", "0.6611897", "0.6352339", "0.6298705", "0.62477887", "0.6227472", "0.6222125", "0.6217452", "0.6090686", "0.607338", "0.6063791", "0.6063283", "0.605453", "0.60500675", "0.6024416", "0.6018847", "0.5996054", "0.5977044", "0.5920064", "0.5918121", "0.591741", "0.5909322", "0.5895338", "0.5852375", "0.5848146", "0.5845665", "0.58281004", "0.5825527", "0.58065075", "0.5799788", "0.5797705", "0.5794805", "0.57888895", "0.5720109", "0.5716932", "0.5711013", "0.5704024", "0.56896466", "0.56844187", "0.5673604", "0.5666981", "0.5664996", "0.5662668", "0.5657615", "0.565225", "0.5639963", "0.562464", "0.561637", "0.56132555", "0.56117845", "0.56082547", "0.5607778", "0.5602532", "0.5601597", "0.5595654", "0.55821896", "0.5578387", "0.55747986", "0.55715334", "0.5566075", "0.5566075", "0.5566075", "0.55597216", "0.55597216", "0.55597216", "0.5552726", "0.55512166", "0.55502176", "0.5546018", "0.55413", "0.5540481", "0.55307615", "0.5529578", "0.55290264", "0.55283415", "0.55282027", "0.55239236", "0.55158204", "0.5515512", "0.5511466", "0.550938", "0.5508712", "0.5505824", "0.5499167", "0.5499085", "0.54969454", "0.5489706", "0.54802036", "0.5472301", "0.54720634", "0.5469931", "0.5462965", "0.5454249", "0.54536074", "0.54499483", "0.54499483", "0.5448189", "0.5446945", "0.5443951" ]
0.6954562
1
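The is_player_ready document above gates rendering on one long chain of boolean state checks. Below is a hedged, framework-free sketch of the same gate, condensed with any() over a tuple of blocking flags; the plain-dict player_states container is an assumption made so the example runs standalone, while the flag names themselves are taken from the document.

BLOCKING_STATES = (
    "is_moving", "is_running", "is_crouch_moving", "is_crouching",
    "is_jumping", "is_using", "is_attacked", "is_busy",
    "is_turning", "is_mounted", "horse_riding",
)

def is_player_ready(player_states):
    # The player must be alive and idle, and in none of the blocking states.
    if not (player_states.get("is_alive") and player_states.get("is_idle")):
        return False
    return not any(player_states.get(state) for state in BLOCKING_STATES)

print(is_player_ready({"is_alive": True, "is_idle": True}))   # True
print(is_player_ready({"is_alive": True, "is_idle": True,
                       "is_jumping": True}))                  # False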
Run the script at the given path, catching exceptions. This function should only be used internally by Pyto.
def runScriptAtPath(path): sys.argv = [path] for arg in PytoClasses.Python.shared.args: sys.argv.append(str(arg)) def run() -> None: os.system = PytoClasses.Python.shared.system directory = os.path.expanduser(os.path.dirname(path)) sys.path.insert(0, directory) try: global __script__ spec = importlib.util.spec_from_file_location("__main__", path) __script__ = importlib.util.module_from_spec(spec) spec.loader.exec_module(__script__) PytoClasses.Python.shared.values = [item for item in dir(__script__) if not item.startswith("__")] except SystemExit: print("SystemExit") except Exception as e: exc_type, exc_obj, exc_tb = sys.exc_info() extracts = traceback.extract_tb(sys.exc_info()[2]) count = len(extracts) lineNumber = -1 fileName = path for i, extract in enumerate(extracts): if extract[0] == fileName: lineNumber = extract[1] break count -= 1 if (type(e) == SyntaxError): # The last word in a `SyntaxError` exception is the line number lineNumber = [int(s) for s in (str(e)[:-1]).split() if s.isdigit()][-1] PytoClasses.Python.shared.errorType = exc_type.__name__ PytoClasses.Python.shared.errorReason = str(e) PytoClasses.EditorViewController.visible.showErrorAtLine(lineNumber) print(traceback.format_exc(limit=-count)) sys.path.remove(directory) PytoClasses.ReviewHelper.shared.launches = PytoClasses.ReviewHelper.shared.launches+1 PytoClasses.ReviewHelper.shared.requestReview() PytoClasses.Python.shared.isScriptRunning = False thread = threading.Thread(target=run, args=()) def loop(): while PytoClasses.Python.shared.isScriptRunning: time.sleep(1) ignoredThreads.append(thread) raise Exception("Stopped script!") def runLoop(): try: loop() except: pass thread.start() runLoop() return __script__
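The runScriptAtPath document just above depends on Pyto-specific classes and threads. As a framework-free illustration of its core idea — execute a file as __main__, swallow SystemExit, and on any other exception walk the traceback to recover the failing line inside that file — here is a standard-library-only sketch; the function name and the error-report format are assumptions.

import runpy
import sys
import traceback

def run_script_at_path(path):
    # Execute the file as if it were the program's __main__ module.
    try:
        runpy.run_path(path, run_name="__main__")
    except SystemExit:
        pass  # let the script call sys.exit() without killing the host
    except Exception as exc:
        # Find the last traceback frame located in the target file.
        line_number = -1
        for frame in traceback.extract_tb(sys.exc_info()[2]):
            if frame.filename == path:
                line_number = frame.lineno
        if isinstance(exc, SyntaxError) and exc.lineno is not None:
            line_number = exc.lineno  # syntax errors never enter a frame
        print(f"{type(exc).__name__} at line {line_number}: {exc}",
              file=sys.stderr)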
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def runScript(path=None):\n if path:\n exec(compile(open(path, \"rb\").read(), path, 'exec'))", "def do_exec(self, arg):\n self.run_file(arg['path'])", "def _run_file(file_path, globals_):\n script_name = os.path.basename(file_path)\n\n sys.path = (_PATHS.script_paths(script_name) +\n _PATHS.scrub_path(script_name, sys.path))\n\n fix_google_path()\n\n execfile(_PATHS.script_file(script_name), globals_)", "async def _run_script(self, path: Path) -> None:\n with open(path, 'r') as f:\n self.conn.executemany(f.read())", "def run_file(file_path, globals_, script_dir=SCRIPT_DIR):\n fix_sys_path()\n script_name = os.path.basename(file_path)\n script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)\n script_path = os.path.join(script_dir, script_name)\n print script_path\n execfile(script_path, globals_)", "def do_run_script(self, arg):\n try:\n with open(os.path.join(os.getcwd(), arg), 'r') as fin:\n script = fin.readlines()\n for line in script:\n self.onecmd(line)\n except (FileNotFoundError) as exc:\n print(exc)", "def run_call(path: Path) -> None:\n if not (path / \"__main__.py\").exists():\n return\n try:\n subprocess.check_call([sys.executable, path.as_posix()], stdout=subprocess.DEVNULL)\n except subprocess.CalledProcessError as e:\n raise SnapshotMismatchError(f\"Path {path} cannot be imported: {e}\") from None", "def script_test(path):\n log.info(\" ... EXECUTING {}\".format(str(path)))\n\n cmd = [sys.executable, str(path)]\n cp = subprocess.run(cmd, stderr=subprocess.PIPE)\n if cp.returncode:\n log.info(\" ... FAILED\")\n log.info(\" ___ TRACEBACK\")\n log.info(cp.stderr.decode(\"utf-8\") + \"\\n\\n\")\n return False\n else:\n log.info(\" ... PASSED\")\n return True", "def run(path):\n # https://github.com/friendlycode/grparks/issues/20\n print(\"TODO: modify file here\")\n print(path)", "def run_script (script, *l) :\n if not os.path.exists (script) :\n raise PQHException (\"file %s not found\" % script)\n py = get_interpreter_path ()\n cmd = \"%s %s\" % (py, script)\n if len (l) > 0 :\n cmd += \" \" + \" \".join ( [str (x) for x in l])\n out,err = run_cmd (cmd)\n return out,err", "def open_script(script_path):\n pass", "def execute(file_path):\n os.startfile(file_path)", "def runScript(self, script):\n data = FilePath(__file__).parent().child('data')\n sample_file = data.child('1.input.ofx')\n\n args = (script, [sample_file.path])\n log.msg('executing %r' % (args,))\n out, err, rc = yield utils.getProcessOutputAndValue(*args, env=None)\n log.msg('rc: %r' % (rc,))\n log.msg('out: %r' % (out,))\n log.msg('err: %r' % (err,))\n if rc != 0:\n self.fail(\"Failed: %s\\n\\n%s\" % (out, err))", "def run_script(self):\n pass", "def PyHiew_ExecuteScript(script, g, strip_path = False):\r\n PY_COMPILE_ERR = None\r\n try:\r\n execfile(script, g)\r\n except Exception, e:\r\n PY_COMPILE_ERR = str(e) + \"\\n\" + traceback.format_exc()\r\n PY_COMPILE_ERR = PY_COMPILE_ERR.replace(\r\n script[:-len(os.path.basename(script))],\r\n '')\r\n if PYHIEW_SHOW_EXEC_ERRORS:\r\n MessageBox(PY_COMPILE_ERR)\r\n\r\n return PY_COMPILE_ERR", "def run_script(self, params, config_no):\n raise NotImplementedError()", "def run(self, script, *args, **kwargs):\n return self._run('run', script, *args, **kwargs)", "def exec_script(self, script):\n filename = os.path.join(self.script_dir, script + \".sh\")\n # http://docs.python.org/library/os.html#os.X_OK\n if os.access(filename, os.X_OK):\n with open(filename):\n subprocess.call(filename)\n self.vibrate(0.1)", "def RunCmdFile(self, path):\n if not self.sim42interp 
== self.shell.interp:\n self.UseCommandInterface(True)\n self.shell.run(\"read \" + path, prompt=0, verbose=0)", "def run_setup_script(self, script_path):\n try:\n f = open(script_path, 'r')\n setup_script = f.read()\n # print(setup_script)\n c = self.conn.cursor()\n c.executescript(setup_script)\n except (Error, IOError) as e:\n print('[Datanase] Error:')\n print(e)", "def run_script(self, pathname, caller=None):\n self.msg(2, \"run_script\", pathname)\n\n pathname = os.path.realpath(pathname)\n m = self.findNode(pathname)\n if m is not None:\n return m\n\n if sys.version_info[0] != 2:\n with open(pathname, 'rb') as fp:\n encoding = util.guess_encoding(fp)\n\n with open(pathname, _READ_MODE, encoding=encoding) as fp:\n contents = fp.read() + '\\n'\n if contents.startswith(BOM):\n # Ignore BOM at start of input\n contents = contents[1:]\n\n else:\n with open(pathname, _READ_MODE) as fp:\n contents = fp.read() + '\\n'\n\n co_ast = compile(contents, pathname, 'exec', ast.PyCF_ONLY_AST, True)\n co = compile(co_ast, pathname, 'exec', 0, True)\n m = self.createNode(Script, pathname)\n self._updateReference(caller, m, None)\n self._scan_code(m, co, co_ast)\n m.code = co\n if self.replace_paths:\n m.code = self._replace_paths_in_code(m.code)\n return m", "def exec_file(self, path):\n assert os.path.isabs(path)\n\n source = None\n\n try:\n with open(path, 'rt') as fd:\n source = fd.read()\n except Exception as e:\n raise SandboxLoadError(self._context.source_stack,\n sys.exc_info()[2], read_error=path)\n\n self.exec_source(source, path)", "def execute(self, code, environment = dict()):\r\n if not self.config.get('scripting', 'enable') and type(code) == str:\r\n self.send(code, log = False)\r\n else:\r\n if type(code) == str:\r\n c = compile(code, 'errors.log', 'exec')\r\n else:\r\n c = code\r\n eval(c, self.getEnvironment(environment))", "def call_script(self, script):\n filename, callable = script.rsplit(':', 1)\n filename = os.path.abspath(filename)\n module = imp.load_source('script', filename)\n script = getattr(module, callable.strip())\n\n try:\n script(self.options, self.buildout, self.augmented_environment())\n except TypeError:\n # BBB: Support hook scripts that do not take the environment as\n # the third parameter\n script(self.options, self.buildout)", "def exec_file(path: str, global_vars: Dict[str, Any]) -> None:\n with open(path) as file:\n exec(compile(file.read(), path, \"exec\"), global_vars) # pylint: disable=exec-used", "def run(filename):\n try:\n with open(filename) as f:\n interp.runcode(f.read())\n except IOError as e:\n self.perror(e)", "def run_workdir(self, path):\n pass", "def run(path):\n config = conf.get_yaml_field(gl.configFile)\n exe_con = config['ENABLE_EXECUTION']\n exe_num = config['EXECUTION_NUM']\n rerun = config['ENABLE_RERUN']\n reruns_nums = config['RERUN_NUM']\n repeat = config['ENABLE_REPEAT']\n repeat_num = config['REPEAT_NUM']\n exec_mode = config['ENABLE_EXEC_MODE']\n debug_mode = config['ENABLE_DEBUG_MODE']\n last_failed = config['ENABLE_LAST_FAILED']\n failed_first = config['ENABLE_FAILED_FIRST']\n\n # custom function\n RunTestCase.copy_custom_function()\n\n # failed first\n failed_first_args = (' --ff ' if failed_first else '') if not last_failed else ''\n\n # last failed\n last_failed_args = (' --lf ' if last_failed else '') if not failed_first else ''\n\n # Enable repeat case.\n repeat_args = ' --count={} '.format(repeat_num) if repeat else ''\n\n # Enable CPU concurrency\n py_args = ' -n {} '.format(exe_num) if exe_con else ''\n\n # Enable failed 
retry\n reruns_args = ' --reruns {} '.format(reruns_nums) if rerun else ''\n\n # debug mode print debug info.\n debug = '' if debug_mode else '--tb=no'\n\n \"\"\"\n Load the pytest framework,\n which must be written here or DDT will be loaded first.\n from httptesting.case import test_load_case\n \"\"\"\n case_path = gl.loadcasePath\n # Output mode console or report.\n if exec_mode:\n cmd = 'cd {} && py.test -q -s {} {} {} {}'.format(\n case_path, reruns_args, 'test_load_case.py',\n repeat_args, debug\n )\n else:\n cmd = 'cd {} && py.test {} {} {} {} {} {} --html={} {} --self-contained-html'.format(\n case_path,\n py_args,\n reruns_args,\n last_failed_args,\n failed_first_args,\n 'test_load_case.py',\n repeat_args,\n path,\n debug\n )\n try:\n os.system(cmd)\n except (KeyboardInterrupt, SystemExit):\n print('已终止执行.')", "def main():\n if os.path.isdir(path):\n for filename in os.listdir(path):\n if filename.endswith('.asm'):\n execute_asm_file(path + '/' + filename, filename)\n else:\n execute_asm_file(path, path[path.rfind(\"/\") + 1:])", "def run_script(func):\n try:\n sys.exit(func(sys.argv[1:], STDIN, sys.stdout))\n except KeyboardInterrupt:\n logger.error(\"Interrupted\")\n sys.exit(EXIT_ERROR)", "def runScript(self, script):\n c = self\n game = self.game\n app = self.game.app\n shell = self.shell\n sprite = self.sprite\n s = shell\n self = self.env\n exec(open(\"script/\" + script).read())", "def exec_python_script(self, filepath=False, script_txt=False):\n if script_txt is False and type(filepath) is str:\n with open(filepath, 'r') as file_:\n script_txt = file_.read()\n \n elif type(script_txt) is str and filepath is False:\n filepath = \"inline-script\"\n \n else:\n SystemError(\"'exec_python_script' function used incorrectly!\"\n +\" Choose either script_txt or filepath\")\n\n # Declare all the variables in the global scope so the user can use them\n _vars = {var_name: getattr(self, var_name) for var_name in self.variables}\n\n\n # Run the script in a try loop\n try:\n exec(script_txt, _vars)\n except Exception as e:\n err_msg = repr(e)\n if hasattr(e, 'txt'):\n err_msg = \"Error in your python code.\\n\\n\"+f\"Script: {filepath}\" + \"\\n\"\n if hasattr(e, \"lineno\"):\n err_msg += f\"Bad Line: {e.text}\" + \"\\n\" + f\"Line Num: {e.lineno}\"\n err_msg += \"\\nError Msg: \" + f\"{e.msg}\"\n\n ltxt = script_txt.split(\"\\n\")\n if hasattr(e, \"lineno\"):\n ltxt[e.lineno-1] += \" <------- BAD LINE\"\n err_msg += \"\\n\\n\\n\\n\\n\\nPython Script:\\n\" + '\\n'.join(ltxt)\n\n self.print_error(err_msg)\n\n for var_name in _vars:\n setattr(self, var_name, _vars[var_name])\n if var_name not in self.variables: self.variables.append(var_name)", "def run(self):\n try:\n self.runCommand()\n except TortugaException as ex:\n print(ex.getErrorMessage())\n raise SystemExit(ex.getErrorCode())\n except SystemExit:\n raise\n except Exception as ex:\n print(str(ex))\n raise SystemExit(-1)", "def runScript(*args, **kwargs):\n env = os.environ.copy()\n env['PYTHONPATH'] = os.pathsep.join(sys.path)\n return chromium_utils.RunCommand(*args, env=env, **kwargs)", "def run(path, cmd):\n logging.info('Processing %s', path)\n logging.debug('Running: %s', ' '.join(cmd))\n subprocess.call(cmd)", "def run_file(self, fpath):\n with open(fpath, \"r\", encoding=\"utf-8\") as fin:\n return self.run_commands(fin.read())", "def _do_run(self, path, args):\n try:\n self.router.route(path, args)\n except TypeError, e:\n # To catch the follow errors\n # TypeError: xxxx got an unexpected keyword argument 'k'\n # 
TypeError: 'print_my_good() takes at least 1 argument (0 given)'\n print \"run job %s with arg < %s > error:\" % (path, \", \".join(args))\n print \"%s\" % e", "def test_script_integrity(capsys):\n script = os.path.abspath(\"examples/scikitlearn-iris/main.py\")\n\n return_code = subprocess.call([\"python\", script, \"0.1\"])\n\n assert return_code != 2, \"The example script does not exists.\"\n assert return_code != 1, \"The example script did not terminates its execution.\"\n assert (\n return_code == 0 and not capsys.readouterr().err\n ), \"The example script encountered an error during its execution.\"", "def run(self):\r\n try:\r\n file_path = os.path.join(self.temp, \"debugtalk.py\")\r\n loader.FileLoader.dump_python_file(file_path, self.__code)\r\n self.resp = decode(subprocess.check_output([EXEC, file_path], stderr=subprocess.STDOUT, timeout=60))\r\n\r\n except subprocess.CalledProcessError as e:\r\n self.resp = decode(e.output)\r\n\r\n except subprocess.TimeoutExpired:\r\n self.resp = 'RunnerTimeOut'\r\n\r\n shutil.rmtree(self.temp)", "def run_execute_file(file_path, globals=None, locals=None):\n if globals is None:\n globals = {}\n globals.update({\n \"__file__\": file_path,\n \"__name__\": \"__main__\",\n })\n with open(file_path, 'rb') as file:\n exec(compile(file.read(), file_path, 'exec'), globals, locals)", "def run_mypy(path: Path) -> None:\n try:\n output = subprocess.check_output(\n [sys.executable, \"-m\", \"mypy\", path.as_posix()],\n stderr=subprocess.STDOUT,\n encoding=\"utf8\",\n )\n except subprocess.CalledProcessError as e:\n output = e.output\n errors = []\n for message in output.splitlines():\n if not message or message.startswith(\"Found\"):\n continue\n if any(imsg in message for imsg in IGNORE_MYPY_ERRORS):\n continue\n errors.append(message)\n\n if errors:\n raise SnapshotMismatchError(\"\\n\".join(errors)) from None", "def run_as_script(scenario_path=None):\n import cea.globalvar\n gv = cea.globalvar.GlobalVariables()\n\n if scenario_path is None:\n scenario_path = gv.scenario_reference\n\n locator = cea.inputlocator.InputLocator(scenario_path=scenario_path)\n weather_file = locator.get_default_weather()\n moo_optimization(locator=locator, weather_file= weather_file, gv=gv)\n\n print 'test_optimization_main() succeeded'", "def run_python_script(package=None, module=None, args=[], p_args=[]):\n assert module is not None\n assert isinstance(args, (tuple, list)) and isinstance(p_args, (tuple, list))\n path = python_script_exists(package, module)\n run_program(sys.executable, p_args + [path] + args)", "def run_file(self, user_input):\n # Extract the important information\n self.path, self.name = self.extractor.extract_program_information(user_input)\n\n # Determine what language the program is\n program_type = self.determine_program_type(path, name)\n\n # If the file is python, run it the specific way\n # @TODO: Make it work without shell=True\n if program_type == \"python\":\n subprocess.Popen(\"python \" + self.path + self.name, shell=True)", "def execute(self, args=\"\"):\r\n return super(PythonScript, self).execute(_EXECUTABLE, args)", "def __try_exec_line(self, line: Text) -> None:\n try:\n exec(line, self.vars)\n except Exception as err:\n print(f'Issue during execution of setup: {err}')\n print(f'Line was: {line}')\n return # TODO: does this stop the process??", "def run(self):\n # Transform paths in absolute paths since we'll change the working directory\n input_files = {local + os.path.splitext(path)[1]: os.path.abspath(path)\n for local, path in 
listitems(self._file_paths) if 'moli' in local}\n output_files = {local + os.path.splitext(path)[1]: os.path.abspath(path)\n for local, path in listitems(self._file_paths) if 'molo' in local}\n\n # Resolve all the names in the script\n local_files = {local: local + os.path.splitext(path)[1]\n for local, path in listitems(self._file_paths)}\n script = self._script.format(**local_files) + 'quit\\n'\n\n with mdtraj.utils.enter_temp_directory():\n # Copy input files\n for local_file, file_path in listitems(input_files):\n shutil.copy(file_path, local_file)\n\n # Save script and run tleap\n with open('leap.in', 'w') as f:\n f.write(script)\n leap_output = subprocess.check_output(['tleap', '-f', 'leap.in']).decode()\n\n # Save leap.log in directory of first output file\n if len(output_files) > 0:\n #Get first output path in Py 3.X way that is also thread-safe\n for val in listvalues(output_files):\n first_output_path = val\n break\n first_output_name = os.path.basename(first_output_path).split('.')[0]\n first_output_dir = os.path.dirname(first_output_path)\n log_path = os.path.join(first_output_dir, first_output_name + '.leap.log')\n shutil.copy('leap.log', log_path)\n\n # Copy back output files. If something goes wrong, some files may not exist\n error_msg = ''\n try:\n for local_file, file_path in listitems(output_files):\n shutil.copy(local_file, file_path)\n except IOError:\n error_msg = \"Could not create one of the system files.\"\n\n # Look for errors in log that don't raise CalledProcessError\n error_patterns = ['Argument #\\d+ is type \\S+ must be of type: \\S+']\n for pattern in error_patterns:\n m = re.search(pattern, leap_output)\n if m is not None:\n error_msg = m.group(0)\n break\n\n if error_msg != '':\n raise RuntimeError(error_msg + ' Check log file {}'.format(log_path))\n\n # Check for and return warnings\n return re.findall('WARNING: (.+)', leap_output)", "def open_program(path):\r\n os.startfile(path)", "def test_works(self):\n script = FilePath(__file__).parent().parent().parent() \\\n .child('scripts').child('parsefin').path\n\n return self.runScript(script)", "def _execute(script, prefix=None, path=None):\n path = tempfile.gettempdir() if path is None else path\n result = 1\n try:\n fh = tempfile.NamedTemporaryFile('w', delete=False)\n fh.write(script)\n fh.close()\n print('Executing script below with cwd=%s\\n{{{\\n%s\\n}}}\\n' %\n (path, script))\n try:\n os.chmod(fh.name, stat.S_IRWXU)\n env = os.environ.copy()\n if prefix is not None:\n env['COLCON_BUNDLE_INSTALL_PREFIX'] = prefix\n result = subprocess.run(\n fh.name, cwd=path, env=env, stdout=PIPE, stderr=PIPE,\n universal_newlines=True)\n if result.stdout is not None:\n logger.debug('stdout output: \\n' + result.stdout)\n if result.stderr is not None:\n logger.warn('stderr output: \\n' + result.stderr)\n except OSError as ex:\n print('Execution failed with OSError: %s' % ex)\n finally:\n if os.path.exists(fh.name):\n os.remove(fh.name)\n logger.info('Return code was: %s' % result)\n return result.returncode == 0", "def pyscript(fp, **context):\n try:\n exec fp in context\n except SystemExit:\n pass\n return context['response']", "def test_script(self) -> None:\n main()", "def compile_run(\n path,\n host,\n params={}\n ):\n\n compiled_path = MyCLI.compile(path)\n MyCLI.run(compiled_path, host, params)", "def run_script(input_file, run_dir, script_name, interpreter='python'):\n from paver.runtime import sh\n from paver.path import path\n docdir = path(input_file).dirname()\n output_text = sh('cd 
%(docdir)s/%(run_dir)s;%(interpreter)s %(script_name)s 2>&1' % vars(),\n capture=True)\n response = '\\n::\\n\\n\\t$ %(interpreter)s %(script_name)s\\n\\t' % vars()\n response += '\\n\\t'.join(output_text.splitlines())\n while not response.endswith('\\n\\n'):\n response += '\\n'\n return response", "def main() -> None:\n try:\n run()\n except errors.BaseError as e:\n sys.stderr.write(f'{str(e)}\\n')\n sys.exit(e.code)", "def run_script(script_path, cwd='.'):\n run_thru_shell = sys.platform.startswith('win')\n if script_path.endswith('.py'):\n script_command = [sys.executable, script_path]\n else:\n script_command = [script_path]\n\n utils.make_executable(script_path)\n\n try:\n proc = subprocess.Popen(script_command, shell=run_thru_shell, cwd=cwd) # nosec\n exit_status = proc.wait()\n if exit_status != EXIT_SUCCESS:\n raise FailedHookException(\n f'Hook script failed (exit status: {exit_status})'\n )\n except OSError as err:\n if err.errno == errno.ENOEXEC:\n raise FailedHookException(\n 'Hook script failed, might be an empty file or missing a shebang'\n ) from err\n raise FailedHookException(f'Hook script failed (error: {err})') from err", "def execute(self):\n teardown_verbosity = self._vars.PEX_TEARDOWN_VERBOSE\n try:\n with self.patch_sys():\n working_set = self._activate()\n TRACER.log('PYTHONPATH contains:')\n for element in sys.path:\n TRACER.log(' %c %s' % (' ' if os.path.exists(element) else '*', element))\n TRACER.log(' * - paths that do not exist or will be imported via zipimport')\n with self.patch_pkg_resources(working_set):\n self._wrap_coverage(self._wrap_profiling, self._execute)\n except Exception:\n # Allow the current sys.excepthook to handle this app exception before we tear things down in\n # finally, then reraise so that the exit status is reflected correctly.\n sys.excepthook(*sys.exc_info())\n raise\n except SystemExit as se:\n # Print a SystemExit error message, avoiding a traceback in python3.\n # This must happen here, as sys.stderr is about to be torn down\n if not isinstance(se.code, int) and se.code is not None:\n print(se.code, file=sys.stderr)\n raise\n finally:\n # squash all exceptions on interpreter teardown -- the primary type here are\n # atexit handlers failing to run because of things such as:\n # http://stackoverflow.com/questions/2572172/referencing-other-modules-in-atexit\n if not teardown_verbosity:\n sys.stderr.flush()\n sys.stderr = DevNull()\n sys.excepthook = lambda *a, **kw: None", "def main():\n arg0 = sys.argv[0]\n if not os.path.isfile(arg0):\n sys.exit(\"sys.argv[0] is not a path to a file: \\\"\" + str(arg0) + \"\\\". Exiting now.\")\n absolute_path_to_file = os.path.realpath(arg0) # realpath follows symlinks, which is what we want in this case.\n absolute_path_to_src = os.path.dirname(absolute_path_to_file)\n (absolute_path_to_repo, src_dirname) = os.path.split(absolute_path_to_src)\n if src_dirname != \"src\":\n sys.exit(\"The driver script should be located in directory \\\"src\\\". It is instead in \\\"\" + src_dirname + \"\\\". 
Exiting now.\")\n os.chdir(absolute_path_to_repo)", "def run_script(self, script_name, script_args=None, node_paths=None):\n # TODO: consider add a pants.util function to manipulate command line.\n package_manager_args = self._get_run_script_args()\n package_manager_args.append(script_name)\n if script_args:\n package_manager_args.append('--')\n package_manager_args.extend(script_args)\n return self.run_command(args=package_manager_args, node_paths=node_paths)", "def test_script(self):\n path = Template().get_script()\n self.assertTrue(os.path.exists(path))", "def Non_VASP_Script(my_project):\n\n WORKFLOWS = my_project['Workflow']\n Workflow_Params = WORKFLOWS['Steps'][2]\n Workflow_name = Workflow_Params['NAME']\n job_dir = my_project['NAME'] + Workflow_Params['NAME']\n chkpt = job_dir + '.json'\n prev_filter = Workflow_Params['Continue']['Filter']\n prev_chkpt = Workflow_Params['Continue']['Source']\n Script = Workflow_Params['Script']\n executable = Script['Executable']\n non_arg_inputs = Script['NonArgInput']\n arg_inputs = Script['ArgInput']\n\n rerun_paths = continue_job_inputs(chkpt_files= prev_chkpt,\\\n user_filters=prev_filter)\n\n # Run the script now at the rerun_paths\n for r in rerun_paths:\n if inputs:\n shutil.copy(inputs, r)\n os.chdir(r)\n print ('Running {0} in {1}'.format(executable, r))\n script_output = sp.run([executable]+ arg_inputs, stdout=sp.PIPE).stdout.decode('utf-8')\n \n\n return None", "def run_script(self, filename=None, silent=False, set_focus=False):\r\n if filename is None:\r\n self.shell.restore_stds()\r\n filename = QFileDialog.getOpenFileName(self,\r\n self.tr(\"Run Python script\"), os.getcwdu(),\r\n self.tr(\"Python scripts\")+\" (*.py ; *.pyw)\")\r\n self.shell.redirect_stds()\r\n if filename:\r\n filename = unicode(filename)\r\n os.chdir( os.path.dirname(filename) )\r\n filename = os.path.basename(filename)\r\n self.emit(SIGNAL(\"refresh()\"))\r\n else:\r\n return\r\n command = \"execfile(%s)\" % repr(osp.abspath(filename))\r\n if set_focus:\r\n self.shell.setFocus()\r\n if self.dockwidget and not self.ismaximized:\r\n self.dockwidget.setVisible(True)\r\n self.dockwidget.raise_()\r\n if silent:\r\n self.shell.write(command+'\\n')\r\n self.shell.run_command(command)\r\n else:\r\n self.shell.write(command)", "def exec_from_inputfile(args):\n args.path = os.path.abspath(args.path)\n if not check(args.path, 'e'):\n clean_up(args.debug, args.folder, args.action, 1)\n\n logger.info(\"You are using the inputfile. All parameters other than folder, API key and debug will be ignored\")\n try:\n startargs = readconfig(args.path)\n makeconfig(*startargs[:13], date=args.today, folder=args.folder)\n\n r = Run('n', args.folder, args.debug)\n r.start()\n\n except TypeError:\n logger.critical(\"Wrong data format. 
Check the documentation\")\n clean_up(args.debug, args.folder, args.action, 1)", "def run_script(extension_invocation_info):\n acm.RunModuleWithParameters(__name__, acm.GetDefaultContext())", "def run_as_script(scenario_path=None):\n import cea.globalvar\n import cea.inputlocator as inputlocator\n\n gv = cea.globalvar.GlobalVariables()\n\n if scenario_path is None:\n scenario_path = gv.scenario_reference\n\n locator = inputlocator.InputLocator(scenario_path=scenario_path)\n total_demand = pd.read_csv(locator.get_total_demand())\n building_names = pd.read_csv(locator.get_total_demand())['Name']\n\n substation_main(locator, total_demand, total_demand['Name'], gv, False)\n\n print 'substation_main() succeeded'", "def test_exit_on_missing_file(self):\n with self.assertRaises(SystemExit):\n pyint = Interpreter()\n pyint.run(file=MISSING_FILE)", "def test_execute_and_import():\n code = dedent('''\n import os\n print os.path\n ''')\n results = ExecuteCode.execute_code(code)\n\n assert results != None\n assert results != ''", "def run_script(script_file: str, config_file: str, **kwargs: Any) -> None:\n # Add config path and current working directory to sys.path to correctly load the configuration\n script_filepath = Path(script_file)\n config_filepath = Path(config_file)\n sys.path.insert(0, script_filepath.resolve().parent.as_posix())\n sys.path.insert(0, config_filepath.resolve().parent.as_posix())\n sys.path.insert(0, os.getcwd())\n\n module = load_module(script_filepath)\n _check_script(module)\n\n run_fn = module.__dict__[\"run\"]\n\n # Lazy setup configuration\n config = ConfigObject(config_filepath, script_filepath=script_filepath)\n\n run_fn(config, **kwargs)", "def run(self, filePath = None):\n\n\t\t\tfileName = self._getFilePath(filePath = filePath)\n\t\t\ttry:\n\t\t\t\tos.startfile(fileName)\n\t\t\texcept AttributeError:\n\t\t\t\tsubprocess.call(['open', fileName])", "def invoke(self):\n self.exitCode = self.script()", "def RunScript(self):\r\n if (not os.path.isfile(self.configDict[\"xsiPath\"])):\r\n print \"XSI does not exist\"\r\n return True\r\n \r\n print (\"start running \" + os.path.basename(self.__script.name))\r\n returnValue = self.RunApplication(self.configDict[\"xsiPath\"] + \r\n \" -script \\\"\" + self.__script.name + \"\\\" > NUL 2>>&1\", \r\n self.__workingDir)\r\n \r\n if (returnValue == 0):\r\n print \"finished running \" + os.path.basename(self.__script.name)\r\n else:\r\n print \"crashed running \" + os.path.basename(self.__script.name)\r\n # since may not be able to do anything with the generated files\r\n return False \r\n \r\n # XSI generates unicode logs -- convert to UTF-8\r\n (encoder, decoder, reader, writer) = codecs.lookup(\"utf-16-le\")\r\n for logfile in self.__logFiles:\r\n logInMemory = \"\"\r\n log = reader(open(logfile))\r\n line = log.readline()\r\n while (line):\r\n logInMemory = logInMemory + line\r\n line = log.readline()\r\n log.close()\r\n logInMemory.encode(\"utf-8\")\r\n log = open(logfile, \"w\")\r\n log.write(logInMemory)\r\n log.close()\r\n \r\n for logfile in self.__importLogFiles:\r\n logInMemory = \"\"\r\n log = open(logfile)\r\n line = log.readline()\r\n while (line):\r\n warningFind = line.find(\"WARNING\")\r\n ctfPathFind = line.find(FXsi.__REPLACE_PATH)\r\n if ((warningFind != -1) and (ctfPathFind != -1)):\r\n line = line.replace(\"WARNING\", \"CTF_OVERRIDE\")\r\n logInMemory = logInMemory + line\r\n line = log.readline()\r\n log.close()\r\n log = open(logfile, \"w\")\r\n log.write(logInMemory)\r\n log.close()\r\n \r\n # since XSI 
has to save in its own project tree, need to relocate files\r\n for projectPath, testProcedurePath in self.__pathMap:\r\n for entry in os.listdir(projectPath):\r\n shutil.move(os.path.join(projectPath, entry), \r\n os.path.join(testProcedurePath, entry))\r\n \r\n return True", "def sub_process(path, student_name, course_name, block_id) :\n\t\n\tcommand = ['python', '../lib/python2.7/site-packages/eyeGaze.py', path, student_name, course_name, block_id]\n\tprocess_call = subprocess.call(command)", "def run(self):\n\n pwd = self.chdir()\n if pwd is None: return -1\n res = mkstuff.run_cmd(self.bindir + '/' + self.func + ' ' + self.args)\n os.chdir(pwd)\n return res", "def run_script(input_file, script_name, interpreter='python'):\r\n from paver.easy import sh\r\n from paver.path import path\r\n rundir = path(input_file).dirname()\r\n output_text = sh('cd %(rundir)s && %(interpreter)s %(script_name)s 2>&1' % vars(), capture=True)\r\n response = '\\n::\\n\\n\\t$ %(interpreter)s %(script_name)s\\n\\t' % vars()\r\n response += '\\n\\t'.join(output_text.splitlines())\r\n while not response.endswith('\\n\\n'):\r\n response += '\\n'\r\n return response", "def run_file(filename, logfile=None, execdir=None):\n if not runpy_available: #pragma:nocover\n raise pyutilib.common.ConfigurationError(\"Cannot apply the run_file() function because runpy is not available\") \n #\n # Open logfile\n #\n if not logfile is None:\n sys.stderr.flush()\n sys.stdout.flush()\n save_stdout = sys.stdout\n save_stderr = sys.stderr\n OUTPUT=open(logfile,\"w\")\n sys.stdout=OUTPUT\n sys.stderr=OUTPUT\n #\n # Add the file directory to the system path\n #\n if '/' in filename:\n tmp= \"/\".join((filename).split(\"/\")[:-1])\n tmp_import = (filename).split(\"/\")[-1]\n sys.path.append(tmp)\n elif '\\\\' in filename:\n tmp = \"\\\\\".join((filename).split(\"\\\\\")[:-1])\n tmp_import = (filename).split(\"\\\\\")[-1]\n sys.path.append(tmp)\n else:\n tmp_import = filename\n name = \".\".join((tmp_import).split(\".\")[:-1])\n #\n # Run the module\n #\n try:\n if not execdir is None:\n tmp=os.getcwd()\n os.chdir(execdir)\n tmp_path = sys.path\n sys.path = [execdir] + sys.path\n runpy.run_module(name,None,\"__main__\")\n if not execdir is None:\n os.chdir(tmp)\n sys.path = tmp_path\n except Exception: #pragma:nocover\n if not logfile is None:\n OUTPUT.close()\n sys.stdout = save_stdout\n sys.stderr = save_stderr\n raise\n #\n # Close logfile\n #\n if not logfile is None:\n OUTPUT.close()\n sys.stdout = save_stdout\n sys.stderr = save_stderr", "def runfile(self, s):\n return self.shell.ex(load_wrap(s, attach=False))", "def RunScript(code):\n with ScriptContext() as script_module:\n try:\n exec code in script_module.__dict__\n except:\n # Get exception output as close to exec as possible.\n # We don't take the first entry in the traceback because it just contains\n # \"exec\". 
Everything after that is the submitted code.\n try:\n etype, evalue, tb = sys.exc_info()\n traceback.print_exception(etype,\n evalue,\n tb.tb_next, # one frame up\n file=sys.stderr)\n finally:\n del tb # break circular references when using exc_info\n\n return sys.stdout.getvalue(), sys.stderr.getvalue()", "def main(args):\n\n # Get additional args for the script\n if len(args) > 1:\n scriptargs = args[1:]\n else:\n scriptargs = None\n\n scriptpath = find_script(args[0])\n\n if not scriptpath:\n print('\\nCannot find that script!: {}'.format(args[0]))\n return 1\n\n # Check for extra input needed.\n forgotargs = check_input(scriptpath, scriptargs)\n if forgotargs:\n if not scriptargs:\n scriptargs = forgotargs\n else:\n scriptargs.extend(forgotargs)\n # Shell script..\n shellret = shell_script(scriptpath, scriptargs)\n if shellret:\n print('wrun: script returned non-zero!: {}'.format(shellret))\n\n return shellret", "def scriptChecker(filename):\n if not os.path.exists(filename):\n print 'ERROR: %s does not exist' % filename\n import errno\n return errno.ENOENT\n\n # The script-checker program is called directly. If we call the code\n # from this python interpreter, any changes to an observing script will\n # not be noticed.\n #\n # This is due to the way python works: a second import statement of the\n # same module does nothing!\n import subprocess\n script = helpers.getCarmaBuildPath() + '/scripts/script-checker'\n cmd = [script, filename]\n ret = subprocess.call(cmd)\n if ret != 0:\n print 'ERROR: script-checker returned status code:', ret", "def execute_script(self, action, *args):\n self.host.cmd(('./%s' + len(args) * ' %s') % (action, *args))", "def _run_exact_solution(solution_dir, test=False):\n\n sys.path.append(solution_dir)\n # add to allow util.py import from day's directory\n sys.path.append(os.path.dirname(solution_dir))\n import solution\n\n if test:\n try:\n solution.run\n except AttributeError:\n solution_filepath = os.path.join(solution_dir, _SOLUTION_FILENAME)\n msg = \"The problem solution {0} does not contain a run() function!\"\n raise EnvironmentError(msg.format(solution_filepath))\n\n solution.test()\n\n # if we hit this, no exceptions, so success\n return \"Success!\"\n else:\n input_val = get_input_for_problem(solution_dir)\n return solution.run(input_val)", "def __call__(self, basepath: str, scriptpath: str) -> Process:\n ...", "def _run_python(self, pyscript, py_version='python'):\n return self.client_remote.run(args=[py_version, '-c', pyscript],\n wait=False)", "def _run_simulator(self):\n os.chdir(self.test_cases_path)\n\n simulator_config_filename = self.simulator_config_filename\n script, options = runner.parse_commands(simulator_config_filename)\n\n if sys.platform.startswith('win'):\n subprocess.call([script] + options, shell=True)\n else:\n subprocess.call([script] + options)\n\n os.chdir(self.this_file_path)", "def import_code(code_path: Optional[Union[Path, str]]) -> None:\n if code_path is not None:\n if not Path(code_path).exists():\n msg.fail(\"Path to Python code not found\", code_path, exits=1)\n try:\n import_file(\"python_code\", code_path)\n except Exception as e:\n msg.fail(f\"Couldn't load Python code: {code_path}\", e, exits=1)", "def run_import(path: Path) -> None:\n if not (path / \"__main__.py\").exists():\n return\n try:\n subprocess.check_call(\n [sys.executable, \"-m\", \"pip\", \"install\", \"--no-input\", path.parent.as_posix()],\n stdout=subprocess.DEVNULL,\n )\n if (path / \"__main__.py\").exists():\n subprocess.check_call(\n 
[sys.executable, \"-c\", f\"import {path.name}\"],\n stdout=subprocess.DEVNULL,\n )\n subprocess.check_call(\n [sys.executable, \"-m\", \"pip\", \"uninstall\", \"--no-input\", \"-y\", path.name],\n stdout=subprocess.DEVNULL,\n )\n except subprocess.CalledProcessError as e:\n raise SnapshotMismatchError(f\"Path {path} cannot be imported: {e}\") from None", "def run_module(self, path):\n\n module = self.import_module(path)\n result = None\n\n if module:\n try:\n result = module.run()\n except AttributeError:\n self.error('Error Running Module: Missing run() method.')\n except Exception:\n e = sys.exc_info()[1]\n traceback = sys.exc_info()[2]\n self.warning('Exeption caught in module: {0} line: {1}'.format(\n e,\n traceback.tb_lineno))\n self.calls.append({path: result})\n state.save_hook_call(path, result)\n return result", "def run_script(self, script, env=None, return_output=False):\n command = [\"/bin/sh\", \"-e\"]\n command.append(script)\n\n return self.run(command, env, return_output)", "def _run_script(fullname):\n name = posixpath.basename(fullname)\n if name[-3:] == '.py':\n name = name[:-3] # strip .py extension\n\n modname = [string.join(fullname.split('/')[0:-1],'/')]\n trylist = ((name, None), (name+'.py', None),\n (name, modname), (name+'.py', modname))\n\n # look for the module in standard locations, load it if you\n # find it, otherwise return 1\n for fname, path in trylist:\n try:\n if path:\n fp, pathname, description = imp.find_module(fname, path)\n else:\n fp, pathname, description = imp.find_module(fname)\n except ImportError:\n fp = None\n if fp:\n sys.argv[0] = pathname\n try:\n mod = imp.load_module('__main__', fp, pathname, description)\n finally:\n fp.close()\n return 1\n return 0", "def run(self):\n try:\n self._run()\n except Exception as err:\n # TODO: Do Task Failure to run exception handling\n pass", "def run(context, path=\"\"):\n common.success(f\"Tests {path} running \")\n return start.run_python(\n context,\n f\"-m pytest {path}\"\n )", "def run_file(self, value=None):\n self.save_file()\n self.p = Popen(\"./Project/myfile.py\", stdout=PIPE, stderr=PIPE)\n output, errors = self.p.communicate()\n self.my_output.delete(\"1.0\", END)\n self.my_output.insert(\"1.0\", output)\n if errors != \"\":\n print_to_log(errors)\n self.my_output.configure(fg=\"red\")\n else:\n self.my_output.configure(fg=\"white\")\n self.my_output.insert(\"1.0\", errors)", "def runscript(self, path):\n self.rpc.call(MsfRpcMethod.SessionMeterpreterScript, [self.sid, path])\n return self.read()", "def ExecuteScript(script):\n os.system(\"%s > /dev/null 2>&1\" % script)", "def do_pyscript(self, arg, opts=None):\n if not arg:\n self.perror(\"pyscript command requires at least 1 argument ...\", traceback_war=False)\n self.do_help('pyscript')\n return\n\n if not USE_ARG_LIST:\n arg = shlex.split(arg, posix=POSIX_SHLEX)\n\n # Get the absolute path of the script\n script_path = os.path.expanduser(arg[0])\n\n # Save current command line arguments\n orig_args = sys.argv\n\n # Overwrite sys.argv to allow the script to take command line arguments\n sys.argv = [script_path]\n sys.argv.extend(arg[1:])\n\n # Run the script - use repr formatting to escape things which need to be escaped to prevent issues on Windows\n self.do_py(\"run({!r})\".format(script_path))\n\n # Restore command line arguments to original state\n sys.argv = orig_args", "def run_gcode_file(self, path, **kwargs):\r\n return self._arm.run_gcode_file(path, **kwargs)", "def _run_script(fullname):\n name = posixpath.basename(fullname)\n 
if name[-3:] == '.py':\n name = name[:-3] # strip .py extension\n\n modname = [string.join(fullname.split('/')[0:-1],'/')]\n trylist = ((name, None), (name+'.py', None),\n (name, modname), (name+'.py', modname))\n\n # look for the modulate in standard locations, load it if you\n # find it, otherwise return 1\n for fname, path in trylist:\n try:\n if path:\n fp, pathname, description = imp.find_module(fname, path)\n else:\n fp, pathname, description = imp.find_module(fname)\n except ImportError:\n fp = None\n if fp:\n sys.argv[0] = pathname\n try:\n mod = imp.load_module('__main__', fp, pathname, description)\n finally:\n fp.close()\n return 1\n return 0", "def run(self):\n try:\n self.parse_args(None)\n self.execute_command()\n except FileExistsException, e:\n print \"Can't copy file as destination already exists.\"\n print \"Exiting...\"\n except Exception, e:\n print \"Exception occured: %s\\nExiting...\" % e", "def run_script_with_context(script_path, cwd, context):\n _, extension = os.path.splitext(script_path)\n\n with open(script_path, encoding='utf-8') as file:\n contents = file.read()\n\n with tempfile.NamedTemporaryFile(delete=False, mode='wb', suffix=extension) as temp:\n env = StrictEnvironment(context=context, keep_trailing_newline=True)\n template = env.from_string(contents)\n output = template.render(**context)\n temp.write(output.encode('utf-8'))\n\n run_script(temp.name, cwd)", "def run_script():\n # pylint: disable=unsupported-assignment-operation\n script_source.data['script'] = [inp_script.value]" ]
[ "0.72653073", "0.6912623", "0.68354243", "0.67718136", "0.67703193", "0.65939707", "0.6413898", "0.63070154", "0.61949843", "0.61937946", "0.6164765", "0.6151639", "0.60480416", "0.60265625", "0.60191596", "0.6006073", "0.5994495", "0.59873414", "0.5959248", "0.58620226", "0.5859723", "0.58492535", "0.5796345", "0.5794666", "0.57743543", "0.5767361", "0.57512784", "0.572017", "0.5686311", "0.5680942", "0.56612295", "0.56507105", "0.5648736", "0.5636002", "0.56233597", "0.56224227", "0.5614047", "0.5586191", "0.55745715", "0.5572075", "0.55717576", "0.5550373", "0.55480444", "0.5539774", "0.5538784", "0.5536208", "0.55325466", "0.55189264", "0.55176485", "0.5513116", "0.55130965", "0.5509538", "0.55078566", "0.5503161", "0.55003715", "0.5484156", "0.5455", "0.5452174", "0.5443917", "0.54387534", "0.54315877", "0.54080844", "0.5406283", "0.53957415", "0.5389169", "0.5388882", "0.5382392", "0.5372897", "0.53700256", "0.5366226", "0.5356897", "0.53565407", "0.53424084", "0.53405493", "0.53375256", "0.53316176", "0.5329205", "0.5328407", "0.53227645", "0.5321698", "0.5307784", "0.5304206", "0.52983683", "0.52953184", "0.52936727", "0.52872354", "0.52862215", "0.5277801", "0.52733916", "0.5263753", "0.5263567", "0.52472067", "0.5246504", "0.5244077", "0.5238478", "0.52296436", "0.5228479", "0.5224145", "0.5209982", "0.52029574" ]
0.7851395
0
Requests input with the given prompt.
def input(prompt="Input"):

    __PyInputHelper__.userInput = None
    __PyInputHelper__.showAlertWithPrompt(prompt)

    while (__PyInputHelper__.userInput == None):
        if (threading.currentThread() in ignoredThreads):
            return ""
        continue

    userInput = __PyInputHelper__.userInput
    __PyInputHelper__.userInput = None
    return str(userInput)
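A minimal usage sketch, assuming a script running inside Pyto where this replacement is installed as the builtin `input` and the `__PyInputHelper__` bridge and `ignoredThreads` set are provided by the app (the call names below are hypothetical user code, not part of the dataset row):

# showAlertWithPrompt() raises the native alert; the busy-wait loop
# above blocks until the helper's userInput attribute is set, then
# the typed text comes back as a str.
name = input("What is your name?")
print("Hello, " + name)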
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def input(self, prompt):\r\n return console_input(prompt)", "def get_input(prompt):\n return input(prompt)", "def get_input(prompt):\n return input(prompt)", "def ask_user_input(prompt: str) -> str:\n return input(prompt)", "def ask(self, prompt: str) -> str:\n raise NotImplementedError", "def get_input(prompt):\n # type: (str) -> str\n return raw_input(prompt)", "def ask_input(self, prompt):\n self._vim.command('call inputsave()')\n self._vim.command('let user_input = input(\"{} \")'.format(prompt))\n self._vim.command('call inputrestore()')\n response = self._vim.eval('user_input')\n self._vim.command('unlet user_input')\n return response", "def textinput(self, title, prompt):\n return simpledialog.askstring(title, prompt)", "def input_helper(prompt):\n if version_info[0] == 2:\n # python2 input is scary - we want raw_input\n return raw_input(prompt)\n else:\n return input(prompt)", "def get_input(prompt):\n try:\n try:\n return raw_input(prompt)\n except NameError:\n return input(prompt)\n except EOFError:\n return ''", "def prompt_base(prompt):\n return input(prompt + \": \")", "def raw_input(self, prompt=''):\r\n \r\n newQueue = Queue()\r\n \r\n self.alert.append(newQueue)\r\n \r\n def requestItem(prompt=''):\r\n out = newQueue.get()\r\n return out\r\n \r\n return requestItem", "def request_input(self, possibles=[]):\n answer = self.console.input('Type your request here:')\n if len(possibles) > 0 and self.numeric:\n invalid = True\n while invalid:\n try:\n answer = int(answer)\n invalid = False\n break\n except:\n answer = self.console.input('Type your request here (numbers only):')\n\n answer = possibles[answer - 1]\n else:\n if answer.find('quit') != -1:\n self.running = False\n else:\n if answer.find('quit') != -1:\n self.running = False\n return answer", "def ask_question(self, question):\n self.response((question))\n return input()", "def prompt_user(prompt):\r\n # raw_input returns the empty string for \"enter\"\r\n yes = set(['yes', 'y'])\r\n no = set(['no','n'])\r\n\r\n try:\r\n print(prompt)\r\n choice = raw_input().lower()\r\n # would like to test for exception here, but not sure if we can do that without importing IPython\r\n except:\r\n print('Stdin is not implemented.')\r\n print('You need to set')\r\n print('overide_manual_authorize=True')\r\n print('to proceed with the download. 
Please set that variable and continue.')\r\n raise\r\n\r\n\r\n if choice in yes:\r\n return True\r\n elif choice in no:\r\n return False\r\n else:\r\n print(\"Your response was a \" + choice)\r\n print(\"Please respond with 'yes', 'y' or 'no', 'n'\")\r\n #return prompt_user()\r", "def getInput(prompt):\n if platform.python_version().startswith('3'):\n userInput = input('%s ' % prompt).strip()\n if platform.python_version().startswith('2'):\n userInput = raw_input('%s ' % prompt).strip()\n return userInput", "def prompt(prompt, validator=(lambda x: True), hint=None):\n user_input = input(prompt)\n while not validator(user_input):\n user_input = input(prompt)\n return user_input", "def rlinput(prompt, prefill=''):\n if \"readline\" not in sys.modules:\n # For example on Windows\n return input(prompt)\n else:\n readline.set_startup_hook(lambda: readline.insert_text(prefill))\n try:\n return input(prompt)\n finally:\n readline.set_startup_hook()", "def passPrompt(title, prompt):\n answer = tkSimpleDialog.askstring(title, prompt, show=\"*\")\n print answer", "def requestInput(st):\n return input(st+\": \")", "def prompt(msg):\n # remove non-blocking mode\n fd = sys.stdin.fileno()\n flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0)\n flags = flags & ~os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, flags)\n return raw_input(msg)", "def prompt(self, question):\n self.output(' ')\n self.output(question)\n self.output(self.parse_response(str(self.ui())))", "def user_prompt(prompt, default=None):\n prompt = f\"\\n {prompt} [{default}] runs or type an amount: \"\n response = input(prompt)\n if not response and default:\n return default\n else:\n return response", "def prompt_for_input(prepend_prompt=''):\n if not prepend_prompt == '':\n prepend_prompt += ' '\n return raw_input(prepend_prompt + '> ').strip()", "def prompt() -> None:\n\n username = click.prompt(\n text=\"Please enter a username\",\n type=click.STRING\n )\n password = click.prompt(\n text=\"Please enter a new password\",\n hide_input=True,\n confirmation_prompt=True\n )\n newsletter_subscription = click.prompt(\n text=\"Would you like to subscribe to our newsletter?\",\n default=False,\n type=click.BOOL\n )\n favorite_color=click.prompt(\n text=\"What is your favorite color?\",\n type=click.Choice([\"blue\", \"green\", \"yellow\"], case_sensitive=False)\n )\n\n click.echo(\n f\"Username: {username} | Password: {'*' * len(password)} | \"\n + f\"Newsletter: {newsletter_subscription} | Favorite color: \"\n + click.style(favorite_color, fg=favorite_color)\n )", "def prompt(self):\n # TODO: fix null input\n print('Enter user input: ')\n userinput = input()\n print(f'User chose: {userinput}')\n return userinput", "def pseudo_raw_input(self, prompt):\n\n if self.use_rawinput:\n try:\n line = sm.input(prompt)\n except EOFError:\n line = 'EOF'\n else:\n self.stdout.write(prompt)\n self.stdout.flush()\n line = self.stdin.readline()\n if not len(line):\n line = 'EOF'\n else:\n if line[-1] == '\\n': # this was always true in Cmd\n line = line[:-1]\n return line", "def __alt_prompt(self, prompt_text: str):\r\n if self.__use_windows_prompt:\r\n sys.stdout.write(prompt_text)\r\n sys.stdout.flush()\r\n i = sys.stdin.readline()\r\n return i.strip()\r\n return input(prompt_text)", "def prompt_str_input(prompt_name: str, get_user_input: GetInputFunc) -> str:\n try:\n return str(get_user_input(f\"type in {prompt_name}:\"))\n except (ValueError, IndexError) as e:\n raise InvalidInput(str(e))", "def _get_input(prompt, options, allow_new=False, 
reprompt_options=None):\n\n _lwr_opts = [x.lower() for x in options]\n if reprompt_options is None:\n reprompt_options = options\n\n while True:\n _resp = input(prompt).strip()\n\n # Check that input is one of the options\n try:\n i = _lwr_opts.index(_resp.lower())\n return options[i]\n except ValueError:\n if not allow_new:\n print(f'Response must be one of the following: {\", \".join(reprompt_options)}')\n\n if allow_new and _resp: # If have a non-empty string\n return _resp", "def raw_input(self, prompt=\"\"):\n return self.prefilter(raw_input_original(prompt),\n prompt==self.outputcache.prompt2)", "def ask(prompt):\n\n return renpy.exports.invoke_in_new_context(renpy.store.layout.yesno_prompt, None, prompt)", "def test_prompt_ask_say_missing_valid(self):\n with mock.patch('__builtin__.raw_input', return_value='mocked input') as mockinput:\n result = self.prompt._prompt({}, {\n 'ask': 'varname'\n })\n\n self.assertEquals(result['ansible_facts']['varname'], 'mocked input')", "def test_prompt_setInput_default_valid(self):\n self.prompt.setInput()\n\n self.assertEquals(\n self.prompt._instr,\n '/dev/tty'\n )\n\n with mock.patch('__builtin__.raw_input', return_value='mocked input') as mockinput:\n result = self.prompt._prompt({}, {\n 'say': 'test',\n 'ask': 'varname'\n })\n\n self.assertEquals(result['ansible_facts']['varname'], 'mocked input')", "def ask_password(self, prompt: str) -> str:\n raise NotImplementedError", "def prompt(msg, default=NO_DEFAULT, validate=None):\n while True:\n response = input(msg + \" \").strip()\n if not response:\n if default is NO_DEFAULT:\n continue\n return default\n if validate is None or validate(response):\n return response", "def get_input_from_player(text):\n return prompt.string(text)", "def do_prompt(self, line):\n if line:\n self.prompt = \"(%s) \" %line\n\n else:\n print 'Please specify a prompt text'", "def _ask_prompt(question: str,\n console: io.IO,\n validate: Optional[Callable[[str], None]] = None,\n default: Optional[str] = None) -> str:\n validate = validate or (lambda x: None)\n while True:\n answer = console.ask(question)\n if default and not answer:\n answer = default\n try:\n validate(answer)\n break\n except ValueError as e:\n console.error(e)\n\n return answer", "def safe_input(prompt=\"\"):\n\n\ttry:\n\t\tresult = input(prompt)\n\t\treturn result\n\texcept KeyboardInterrupt:\n\t\tsys.exit()\n\texcept:\n\t\treturn \"\"", "def ask_input(prompt = '', is_password = False):\n\n while True:\n answer = getpass.getpass() if is_password == True else input(prompt)\n if answer is not '':\n return answer", "def inask(question: str) -> str:\n answer = input(question)\n return answer", "def prompt(self, console: io.IO, step: str,\n args: Dict[str, Any]) -> Dict[str, Any]:\n pass", "def prompt(self, console: io.IO, step: str,\n args: Dict[str, Any]) -> Dict[str, Any]:\n pass", "def get_user_input(self, msg):\n resp = input(msg)\n return resp", "def prompt(self):\n return input(self.message + \": \").strip()", "def ask_user_input(self, sentence):\n user_input = raw_input(sentence + \" : \")\n return user_input", "def test_prompt_ask_var_simple_valid(self):\n with mock.patch('__builtin__.raw_input', return_value='mocked input') as mockinput:\n result = self.prompt._prompt({}, {\n 'say': 'test',\n 'ask': 'varname'\n })\n\n self.assertEquals(result['ansible_facts']['varname'], 'mocked input')", "def __ask_query(self):\n self.__output = list()\n return input(form('What do you want to search?\\n> '))", "def prompt(self, prompt_text: str) -> str:\r\n 
try:\r\n if self.__use_standard_console:\r\n user_input = prompt(prompt_text,\r\n history=self.__history,\r\n auto_suggest=self.__auto_suggest,\r\n completer=self.__completer)\r\n else:\r\n user_input = self.__alt_prompt(prompt_text)\r\n except KeyboardInterrupt:\r\n return self.__ctrl_c_command\r\n return user_input", "def test_prompt_setInput_stringio_valid(self):\n instr = StringIO.StringIO()\n self.prompt.setInput(instr)\n\n self.assertEquals(instr, self.prompt._instr)\n self.assertEquals(instr.getvalue(), \"\")\n\n with mock.patch('__builtin__.raw_input', return_value='mocked input') as mockinput:\n result = self.prompt._prompt({}, {\n 'say': 'test',\n 'ask': 'varname'\n })\n\n self.assertEquals(result['ansible_facts']['varname'], 'mocked input')", "def prompt_ip(prompt):\n response = \"\"\n while not is_valid_ip(response):\n response = prompt_base(prompt)\n return response", "def waitprompt(c):\n c.expect('\\n> ')\n time.sleep(0.1)", "def do_prompt(self):\n # we need _something_ in the dictionary even if the user decides to use all defaults\n # otherwise for some unknown reason it won't work\n user_in = {'__meta__': '__user_input__'}\n\n print('Please enter the information asked for in the following prompts in order to configure your deployment')\n # get the config information from the user\n for p in self.prompts:\n answer = input(p['prompt'])\n if len(answer.strip()) > 0 and 'variable' in p.keys():\n user_in[p['variable']] = answer\n\n # return the data\n return user_in", "def prompt_int(prompt):\n while True:\n try:\n return int(input(prompt))\n except ValueError as e:\n print('Provide an integer')", "def raw_input(self, prompt=''):\n self.new_prompt(prompt)\n self._input_state = 'raw_input'\n if hasattr(self, '_cursor'):\n del self._cursor\n self.SetCursor(wx.StockCursor(wx.CURSOR_CROSS))\n self.__old_on_enter = self._on_enter\n event_loop = wx.EventLoop()\n def my_on_enter():\n event_loop.Exit()\n self._on_enter = my_on_enter\n # XXX: Running a separate event_loop. Ugly.\n event_loop.Run()\n self._on_enter = self.__old_on_enter\n self._input_state = 'buffering'\n self._cursor = wx.BusyCursor()\n return self.input_buffer.rstrip('\\n')", "def _prompt(prompt):\n return raw_input(\"%s [yes or no]: \" % prompt) == \"yes\"", "def Wait(p_question: str):\n input(p_question)\n return", "def prompt(promptstring='>'):\n class PromptWithString(Prompt):\n def __init__(self, document, callback=None):\n Prompt.__init__(self, document, callback)\n self.promptstring = promptstring\n return PromptWithString", "def AskString(prompt, default = \"\", id=261, ok=None, cancel=None):\n\n raise NotImplementedError(\"AskString\")", "def _prompt(letters='yn', default=None):\n\n import sys\n while True:\n try:\n inputstr = sys.stdin.readline().strip()\n except KeyboardInterrupt:\n sys.exit(0)\n if inputstr and inputstr in letters:\n return inputstr\n if default is not None and inputstr == '':\n return default\n print 'Come again?'", "def ask_for_query():\n print('Enter query, empty to quit:')\n try:\n query = input('? 
')\n except EOFError:\n # User has cancelled\n return False\n\n return query", "def pseudo_raw_input(self, prompt):\n\n # Deal with the vagaries of readline and ANSI escape codes\n safe_prompt = self._surround_ansi_escapes(prompt)\n\n if self.use_rawinput:\n try:\n if sys.stdin.isatty():\n line = sm.input(safe_prompt)\n else:\n line = sm.input()\n if self.echo:\n sys.stdout.write('{}{}\\n'.format(safe_prompt, line))\n except EOFError:\n line = 'eof'\n else:\n if self.stdin.isatty():\n # on a tty, print the prompt first, then read the line\n self.poutput(safe_prompt, end='')\n self.stdout.flush()\n line = self.stdin.readline()\n if len(line) == 0:\n line = 'eof'\n else:\n # we are reading from a pipe, read the line to see if there is\n # anything there, if so, then decide whether to print the\n # prompt or not\n line = self.stdin.readline()\n if len(line):\n # we read something, output the prompt and the something\n if self.echo:\n self.poutput('{}{}'.format(safe_prompt, line))\n else:\n line = 'eof'\n return line.strip()", "def input_with_default(prompt, default):\n response = raw_input(\"%s (Default %s) \"%(prompt, default))\n if not response:\n return default\n return response", "def do_prompt(self, line):\n self.prompt = line + ': '", "def prompt(self):\n self.prompt_flag = True", "def _prompt(letters='yn', default=None):\n while True:\n try:\n input_text = sys.stdin.readline().strip()\n except KeyboardInterrupt:\n sys.exit(0)\n if input_text and input_text in letters:\n return input_text\n if default is not None and input_text == '':\n return default\n print('Come again?')", "def launch_request_handler(handler_input):\n # type: (HandlerInput) -> Response\n speech = \"Welcome to the Merriam-Webster Dictionary. What word can I look up for you?\"\n reprompt = \"You can say: definition of word, example of word, or synonym of word.\"\n\n handler_input.response_builder.speak(speech).ask(reprompt)\n return handler_input.response_builder.response", "def askforinput(msg='Do you want to proceed?', tab='', newline='\\n'):\n while True:\n inp = input(ColorText(f\"{newline}{tab}INPUT NEEDED: {msg} \\n{tab}(yes | no): \").warn().__str__()).lower()\n if inp in ['yes', 'no']:\n if inp == 'no' and msg=='Do you want to proceed?':\n print(ColorText('exiting %s' % sys.argv[0]).fail())\n exit()\n break\n else:\n print(ColorText(\"Please respond with 'yes' or 'no'\").fail())\n return inp", "def input(cls, prompt=''):\n text_in = cls.input_reference(prompt)\n corrected_text = cls.auto_correct(text_in)\n return corrected_text", "def _get_input(question: str) -> str:\n print(question)\n sys.stdout.flush()\n user_input = sys.stdin.readline()\n user_input = user_input.strip()\n return user_input", "def prompt():\n sys.stdout.write('>> ')\n sys.stdout.flush()", "def test_prompt_msg_shows_default(self):\n with mock.patch('__builtin__.raw_input', return_value=\"Andrew\") as mockinput:\n result = self.prompt._prompt(self.response, {\n \"say\": \"First Name\",\n \"ask\": \"first_name\",\n \"default\": \"foobar\"\n })\n\n args, kwargs = mockinput.call_args\n\n self.assertEquals(\"First Name [foobar]? 
\", args[0])\n self.assertEquals(result['ansible_facts']['first_name'], 'Andrew')", "def prompt(text, choices):\n text += \" [\" + \"/\".join(choices) + \"] \"\n while True:\n inp = input(text)\n if inp in choices:\n return inp", "def prompt_user(prompt: str) -> bool:\n positive_response = {'yes', 'y', 'ye', '', 'ok'}\n negative_response = {'no', 'n'}\n\n while True:\n answer = input(prompt).lower()\n if answer in positive_response:\n return True\n elif answer in negative_response:\n return False\n else:\n print(\"Please respond with 'yes' or 'no'\\n\", file=sys.stderr)", "def prompt(self, upstream_name):\n request = PromptRequest(upstream_name=upstream_name)\n response = self.stub.Prompt(request, timeout=5)", "def test_prompt_msg_defaults(self):\n with mock.patch('__builtin__.raw_input', return_value=\"\") as mockinput:\n result = self.prompt._prompt(self.response, {\n \"say\": \"First Name\",\n \"ask\": \"first_name\",\n \"default\": \"foobar\"\n })\n\n args, kwargs = mockinput.call_args\n\n self.assertEquals(\"First Name [foobar]? \", args[0])\n self.assertEquals(result['ansible_facts']['first_name'], 'foobar')", "def input_fake_rock(prompt):\n\n\tprint(prompt)\n\treturn 'rock'", "def get_input(prompt, default=None, choices=None, option_value=None):\r\n if option_value is not None:\r\n return option_value\r\n \r\n choices = choices or []\r\n while 1:\r\n r = raw_input(prompt+' ').strip()\r\n if not r and default is not None:\r\n return default\r\n if choices:\r\n if r not in choices:\r\n r = None\r\n else:\r\n break\r\n else:\r\n break\r\n return r", "def test_prompt_ask_var_numbers_valid(self):\n with mock.patch('__builtin__.raw_input', return_value='mocked input') as mockinput:\n result = self.prompt._prompt({}, {\n 'say': 'test',\n 'ask': '12345'\n })\n\n self.assertEquals(result['ansible_facts']['12345'], 'mocked input')", "def prompt(self):\r\n super().prompt_number()\r\n self.email = str(input(\"Email: \"))", "def ask(question=WARNING_DIFF):\n\t\t\tfd = sys.stdin.fileno()\n\n\t\t\toldterm = termios.tcgetattr(fd)\n\t\t\tnewattr = termios.tcgetattr(fd)\n\t\t\tnewattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO\n\t\t\ttermios.tcsetattr(fd, termios.TCSANOW, newattr)\n\n\t\t\toldflags = fcntl.fcntl(fd, fcntl.F_GETFL)\n\t\t\tfcntl.fcntl(fd, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)\n\n\t\t\tself.stdout.write(question)\n\n\t\t\ttry:\n\t\t\t\twhile True:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tfirstCharacter = sys.stdin.read(1)\n\t\t\t\t\t\treturn forceUnicode(firstCharacter) in (u\"y\", u\"Y\")\n\t\t\t\t\texcept IOError:\n\t\t\t\t\t\tpass\n\t\t\tfinally:\n\t\t\t\ttermios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)\n\t\t\t\tfcntl.fcntl(fd, fcntl.F_SETFL, oldflags)", "def ask_user( prompt ):\n answer = raw_input( prompt )\n if answer.lower() in [\"y\",\"yes\"]:\n return True\n else:\n return False", "def safe_input(response):\n try:\n return input(response)\n except EOFError:\n return None\n except KeyboardInterrupt:\n return None", "def prompt_string(prompt=\"Enter a value\",\n default=None):\n _new = None\n while True:\n try:\n _new = str(input(f\"{prompt}? 
[{str(default)}]: \")) # nosec\n break\n except ValueError:\n print(\"Sorry, I didn't understand that.\")\n continue\n except KeyboardInterrupt:\n break\n return default if _new in [None, ''] else _new", "def input_prompt(self):\n return 'Stock code:'", "def put_prompt(self, session):\n self.reply_text(session, self._prompt, False)", "def Prompt(self,message):\n\t\tself.acad.ActiveDocument.Utility.Prompt(message)", "def get_input():\n return getch()", "def simple_response(prompt, default=None):\n if default is None:\n response = input(prompt + ': ')\n else:\n response = input(prompt + f' [{default}]' + ': ')\n if response != '':\n return response\n elif response == '' and default is not None:\n return default\n else:\n print('Please enter a valid response')\n return simple_response(prompt, default)", "def prompt(name, default):\n value = raw_input('%s [%s]: ' %(name, default))\n if not value:\n value = default\n return value", "def prompt_selection(self,\r\n prompt_text: str,\r\n validate: Union[Callable[[str], Optional[Any]], partial],\r\n default: Any) -> Any:\r\n while True:\r\n try:\r\n if self.__use_standard_console:\r\n user_input = prompt(prompt_text)\r\n else:\r\n user_input = self.__alt_prompt(prompt_text)\r\n except KeyboardInterrupt:\r\n return default\r\n if user_input == '':\r\n return default\r\n user_input = validate(user_input)\r\n if user_input is not None:\r\n break\r\n return user_input", "def prompt(question):\n print('\\n')\n while True:\n reply = str(input(question+' (y/n): ')).lower().strip()\n if reply[:1] == 'y':\n return True\n if reply[:1] == 'n':\n return False", "def cont():\n\n try:\n input = raw_input()\n except Exception:\n pass", "def test_perform_get_input_raw_input(monkeypatch):\n monkeypatch.setattr(\"builtins.input\", lambda p: \"my name\" if p == \"> \" else \"boo\")\n assert sync_perform(stdio_dispatcher, Effect(Prompt(\"> \"))) == \"my name\"", "def prompt_with_options(prompt, default=None, options=None):\n\n msg = \"%s [%s]: \" % (prompt, default) if default is not None else \"%s: \" % prompt\n value = None\n while value is None:\n value = raw_input(msg).strip()\n if value:\n if options and value not in options:\n value = None\n elif default is not None:\n value = default\n\n return value", "def read_user_input(self):\n\n self.commandline = raw_input(\"Enter the string you want to parse\\n\")", "def prompt_user_account_to_deposit():\n print('What account do you want to deposit to?:')\n return input()", "def user_input(self, options, prompt):\n for o in options:\n line = self.selector_line(o)\n o[\"line\"] = line\n self.output(line)\n self.output(prompt, end=\" \")\n while True:\n if self.test_input:\n inp = self.test_input.pop(0)\n self.output(f\"Using '{inp}' test input\")\n else:\n try:\n inp = raw_input()\n except (IOError, KeyboardInterrupt):\n self.game.print_state()\n raise\n if inp:\n matching = []\n for o in options:\n if o[\"selector\"] == inp:\n return o\n if inp.lower() in o[\"line\"].lower() and o[\"selector\"] != \"-\":\n matching.append(o)\n if len(matching) == 1:\n return matching[0]\n self.output(f\"Invalid Option ({inp})\")", "def ask_string(message=\"Enter something.\", title=None):\n return dialog(\"ask_string\", message=message, title=title)" ]
[ "0.8005862", "0.7744237", "0.7744237", "0.7655172", "0.76528496", "0.75383127", "0.7535779", "0.72016025", "0.7145495", "0.71204597", "0.7108815", "0.70427936", "0.69868904", "0.69493484", "0.69454503", "0.69408095", "0.692065", "0.6888201", "0.6861148", "0.6780953", "0.6747571", "0.6711964", "0.6700579", "0.66943324", "0.66941994", "0.66802555", "0.66690457", "0.66633075", "0.66610426", "0.66579795", "0.6655188", "0.6598989", "0.65348095", "0.6514758", "0.65053946", "0.6498659", "0.64954513", "0.64943767", "0.64875436", "0.6467011", "0.6465322", "0.64612156", "0.64471734", "0.64471734", "0.6431825", "0.6420201", "0.64176613", "0.6416124", "0.6406553", "0.63943624", "0.63926154", "0.6391696", "0.6388536", "0.6377578", "0.6358193", "0.6351446", "0.633976", "0.6323616", "0.6317363", "0.63167363", "0.63137996", "0.63028026", "0.6302583", "0.6295087", "0.6292915", "0.6278397", "0.6277735", "0.62638354", "0.62602043", "0.62455493", "0.6235225", "0.62188226", "0.62037945", "0.62033033", "0.6199298", "0.619449", "0.6190988", "0.618786", "0.6184452", "0.61730844", "0.6158315", "0.6146451", "0.61309713", "0.6127223", "0.61254853", "0.6120287", "0.61193836", "0.61033237", "0.60983", "0.60944134", "0.60942227", "0.6087321", "0.60863835", "0.6079797", "0.6078493", "0.60748047", "0.6048813", "0.6045901", "0.6044351", "0.60413074" ]
0.66135526
31
Prints to the Pyto console, not to stdout. Works like the builtin `print` function but does not support printing to a custom file. Pyto catches stdout and stderr by default, so use the builtin function instead. This function is mainly for internal use.
def print(*objects, sep=None, end=None):

    if sep is None:
        sep = ' '
    if end is None:
        end = '\n'

    array = map(str, objects)
    __PyOutputHelper__.print(sep.join(array) + end)
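A small sketch of the semantics, assuming the same `__PyOutputHelper__` console bridge: the objects are stringified, joined with `sep`, suffixed with `end`, and handed to the helper as one string, so the two calls below emit identical console text (the literal values are hypothetical):

# Both write "1-2-3!" to the Pyto console.
print(1, 2, 3, sep='-', end='!')
__PyOutputHelper__.print('-'.join(map(str, (1, 2, 3))) + '!')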
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def printout(*args, **kwargs):\n console_print(sys.stdout, *args, **kwargs)", "def real_print(*args, **kwargs):\n\n kwargs.setdefault('file', real_stdout)\n _python_print_function(*args, **kwargs)", "def escaped_printer(to_write):\n # suppress(anomalous-backslash-in-string)\n to_write = to_write.replace(\";\", \"{c};\".format(c=char))\n to_write = to_write.replace(\"\\n\", \";\\n\") + \";\\n\"\n\n if file_object:\n file_object.write(to_write)\n else:\n sys.stdout.write(to_write)", "def print(*args, **kwargs):\n with P_LOCK:\n __builtins__.print(*args, **kwargs)", "def hook_print():\n sys.stdout = PrintHook()", "def disable_print_statements_on_console(func):\n\n @wraps(func)\n def wrap(*args, **kw):\n suppress_text = io.StringIO()\n sys.stdout = suppress_text\n result = func(*args, **kw)\n sys.stdout = sys.__stdout__\n return result\n\n return wrap", "def prnt(printstring, silent=False):\n if not silent:\n stdout.write(printstring)", "def _default_eprint_worker(*args, **kwargs):\r\n kwargs[\"file\"] = sys.stderr\r\n print(*args, **kwargs)", "def eprint(*args, **kwargs):\n\tprint(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def stderr_print(*args, **kwargs):\n\n sys.stdout.flush()\n print(*args, **kwargs, file=sys.stderr)\n sys.stderr.flush()\n\n # else caller has to \"{}\\n\".format(...) and flush", "def console_print(out, *args, **kwargs):\n const_charset = stream_encoding(out)\n out.write(' '.join([a.encode(cons_charset, 'replace') for a in args]))\n if kwargs.get('newline', True):\n out.write('\\n')", "def print(self, *args, **kwargs):\n print(*args, **kwargs)", "def setPrint():\n (e,d,sr,sw) = codecs.lookup('utf-8')\n unicode_to_utf8 = sw(sys.stdout)\n sys.stdout = unicode_to_utf8", "def err_print(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def print_(*args, **kwargs):\n fp = kwargs.pop(\"file\", sys.stdout)\n if fp is None:\n return\n\n def write(data):\n if not isinstance(data, basestring):\n data = str(data)\n fp.write(data)\n want_unicode = False\n sep = kwargs.pop(\"sep\", None)\n if sep is not None:\n if isinstance(sep, unicode):\n want_unicode = True\n elif not isinstance(sep, str):\n raise TypeError(\"sep must be None or a string\")\n end = kwargs.pop(\"end\", None)\n if end is not None:\n if isinstance(end, unicode):\n want_unicode = True\n elif not isinstance(end, str):\n raise TypeError(\"end must be None or a string\")\n if kwargs:\n raise TypeError(\"invalid keyword arguments to print()\")\n if not want_unicode:\n for arg in args:\n if isinstance(arg, unicode):\n want_unicode = True\n break\n if want_unicode:\n newline = unicode(\"\\n\")\n space = unicode(\" \")\n else:\n newline = \"\\n\"\n space = \" \"\n if sep is None:\n sep = space\n if end is None:\n end = newline\n for i, arg in enumerate(args):\n if i:\n write(sep)\n write(arg)\n write(end)", "def safe_print(text, file=sys.stdout):\n if not isinstance(text, basestring):\n return print(text, file=file)\n try:\n file.write(text)\n except UnicodeEncodeError:\n bytes_string = text.encode(file.encoding, 'backslashreplace')\n if hasattr(file, 'buffer'):\n 
file.buffer.write(bytes_string)\n else:\n text = bytes_string.decode(file.encoding, 'strict')\n file.write(text)\n file.write(\"\\n\")", "def cmdPrint( self, *args):\n return self.cmd( *args, **{ 'verbose': True } )", "def print_out():\n pass", "def print_(*args, **kwargs):\r\n fp = kwargs.pop(\"file\", sys.stdout)\r\n if fp is None:\r\n return\r\n def write(data):\r\n if not isinstance(data, basestring):\r\n data = str(data)\r\n fp.write(data)\r\n want_unicode = False\r\n sep = kwargs.pop(\"sep\", None)\r\n if sep is not None:\r\n if isinstance(sep, unicode):\r\n want_unicode = True\r\n elif not isinstance(sep, str):\r\n raise TypeError(\"sep must be None or a string\")\r\n end = kwargs.pop(\"end\", None)\r\n if end is not None:\r\n if isinstance(end, unicode):\r\n want_unicode = True\r\n elif not isinstance(end, str):\r\n raise TypeError(\"end must be None or a string\")\r\n if kwargs:\r\n raise TypeError(\"invalid keyword arguments to print()\")\r\n if not want_unicode:\r\n for arg in args:\r\n if isinstance(arg, unicode):\r\n want_unicode = True\r\n break\r\n if want_unicode:\r\n newline = unicode(\"\\n\")\r\n space = unicode(\" \")\r\n else:\r\n newline = \"\\n\"\r\n space = \" \"\r\n if sep is None:\r\n sep = space\r\n if end is None:\r\n end = newline\r\n for i, arg in enumerate(args):\r\n if i:\r\n write(sep)\r\n write(arg)\r\n write(end)", "def print_(*args, **kwargs):\r\n fp = kwargs.pop(\"file\", sys.stdout)\r\n if fp is None:\r\n return\r\n def write(data):\r\n if not isinstance(data, basestring):\r\n data = str(data)\r\n fp.write(data)\r\n want_unicode = False\r\n sep = kwargs.pop(\"sep\", None)\r\n if sep is not None:\r\n if isinstance(sep, unicode):\r\n want_unicode = True\r\n elif not isinstance(sep, str):\r\n raise TypeError(\"sep must be None or a string\")\r\n end = kwargs.pop(\"end\", None)\r\n if end is not None:\r\n if isinstance(end, unicode):\r\n want_unicode = True\r\n elif not isinstance(end, str):\r\n raise TypeError(\"end must be None or a string\")\r\n if kwargs:\r\n raise TypeError(\"invalid keyword arguments to print()\")\r\n if not want_unicode:\r\n for arg in args:\r\n if isinstance(arg, unicode):\r\n want_unicode = True\r\n break\r\n if want_unicode:\r\n newline = unicode(\"\\n\")\r\n space = unicode(\" \")\r\n else:\r\n newline = \"\\n\"\r\n space = \" \"\r\n if sep is None:\r\n sep = space\r\n if end is None:\r\n end = newline\r\n for i, arg in enumerate(args):\r\n if i:\r\n write(sep)\r\n write(arg)\r\n write(end)", "def out(*args):\r\n print(*args)", "def pflush(*args, **kwargs):\n print(*args, **kwargs)\n sys.stdout.flush()", "def print_(*args, **kwargs):\r\n fp = kwargs.pop(\"file\", sys.stdout)\r\n if fp is None:\r\n return\r\n\r\n def write(data):\r\n if not isinstance(data, basestring):\r\n data = str(data)\r\n fp.write(data)\r\n\r\n want_unicode = False\r\n sep = kwargs.pop(\"sep\", None)\r\n if sep is not None:\r\n if isinstance(sep, unicode):\r\n want_unicode = True\r\n elif not isinstance(sep, str):\r\n raise TypeError(\"sep must be None or a string\")\r\n end = kwargs.pop(\"end\", None)\r\n if end is not None:\r\n if isinstance(end, unicode):\r\n want_unicode = True\r\n elif not isinstance(end, str):\r\n raise TypeError(\"end must be None or a string\")\r\n if kwargs:\r\n raise TypeError(\"invalid keyword arguments to print()\")\r\n if not want_unicode:\r\n for arg in args:\r\n if isinstance(arg, unicode):\r\n want_unicode = True\r\n break\r\n if want_unicode:\r\n newline = unicode(\"\\n\")\r\n space = unicode(\" \")\r\n else:\r\n newline = 
\"\\n\"\r\n space = \" \"\r\n if sep is None:\r\n sep = space\r\n if end is None:\r\n end = newline\r\n for i, arg in enumerate(args):\r\n if i:\r\n write(sep)\r\n write(arg)\r\n write(end)", "def print(*objects, **kwargs):\n try:\n stream = kwargs.get('file', None)\n if stream is None:\n stream = sys.stdout\n enc = stream.encoding\n if enc is None:\n enc = sys.getdefaultencoding()\n except AttributeError:\n return __builtins__.print(*objects, **kwargs)\n texts = []\n for object in objects:\n try:\n original_text = str(object)\n except UnicodeEncodeError:\n original_text = unicode(object)\n texts.append(original_text.encode(enc, errors='replace').decode(enc))\n return __builtins__.print(*texts, **kwargs)", "def pprint(*args, **kwargs):\n if PRINTING:\n print(*args, **kwargs)", "def printerr(*args, **kwargs):\n console_print(sys.stderr, *args, **kwargs)", "def use_pypprint_for_implicit_print(self) -> None:\n if self.implicit_print is not None:\n self.implicit_print.func.id = \"pypprint\" # type: ignore\n # Make sure we import it later\n self.undefined.add(\"pypprint\")", "def _print(self, *args):\n return _ida_hexrays.vd_printer_t__print(self, *args)", "def print_(*args, **kwargs):\r\n fp = kwargs.pop(\"file\", sys.stdout)\r\n if fp is None:\r\n return\r\n def write(data):\r\n if not isinstance(data, basestring):\r\n data = str(data)\r\n # If the file has an encoding, encode unicode with it.\r\n if (isinstance(fp, file) and\r\n isinstance(data, unicode) and\r\n fp.encoding is not None):\r\n errors = getattr(fp, \"errors\", None)\r\n if errors is None:\r\n errors = \"strict\"\r\n data = data.encode(fp.encoding, errors)\r\n fp.write(data)\r\n want_unicode = False\r\n sep = kwargs.pop(\"sep\", None)\r\n if sep is not None:\r\n if isinstance(sep, unicode):\r\n want_unicode = True\r\n elif not isinstance(sep, str):\r\n raise TypeError(\"sep must be None or a string\")\r\n end = kwargs.pop(\"end\", None)\r\n if end is not None:\r\n if isinstance(end, unicode):\r\n want_unicode = True\r\n elif not isinstance(end, str):\r\n raise TypeError(\"end must be None or a string\")\r\n if kwargs:\r\n raise TypeError(\"invalid keyword arguments to print()\")\r\n if not want_unicode:\r\n for arg in args:\r\n if isinstance(arg, unicode):\r\n want_unicode = True\r\n break\r\n if want_unicode:\r\n newline = unicode(\"\\n\")\r\n space = unicode(\" \")\r\n else:\r\n newline = \"\\n\"\r\n space = \" \"\r\n if sep is None:\r\n sep = space\r\n if end is None:\r\n end = newline\r\n for i, arg in enumerate(args):\r\n if i:\r\n write(sep)\r\n write(arg)\r\n write(end)", "def betterprint(text):\n try:\n print(text)\n except OSError as e:\n pass", "def output(*args):\n print(*args, end='', file=file)", "def print(*args, **kwargs):\n new_args = []\n for arg in args:\n if builtins.isinstance(arg, models.Point):\n new_args.append(\"({0}, {1})\".format(arg.x, arg.y))\n else:\n new_args.append(arg)\n\n builtins.print(*new_args, **kwargs)", "def _default_vprint_worker(*args, **kwargs):\r\n print(*args, **kwargs)", "def _p(self, *args, level=2, **kwargs):\n if self._verbosity >= level:\n print(*args, **kwargs)", "def _print(self, text):\n\t\tif self.verbose:\n\t\t\tprint text", "def print_err(*args, **kwargs):\n print(*args, file=stderr, **kwargs)", "def console(out):\n logging.debug(out)\n try:\n print(out)\n except UnicodeEncodeError:\n print(re.sub(r'([^\\s\\w]|_)+', '', out))", "def paste_to_stdout(self, text):\n _builtin_print(text)\n return self", "def p(self):\n self.printstdout = True", "def blockPrint():\n sys.stdout = 
open(os.devnull, 'w')", "def _Print(self, t):\n self.RaiseError(t, \"Print not supported\")", "def _redefine_print(is_main):\n import builtins as __builtin__\n\n builtin_print = __builtin__.print\n\n def print(*args, **kwargs):\n force = kwargs.pop(\"force\", False)\n if is_main or force:\n builtin_print(*args, **kwargs)\n\n __builtin__.print = print", "def print_and_return(*args, **kwargs):\n print(*args, end=\"\\r\", **kwargs)", "def print(text):\n\n return builtin_print('{} | {}'.format(\n time.strftime('%H:%M:%S', time.gmtime()),\n text\n ))", "def custom_print(*objects):\n print(*objects, sep=OFS, end=ORS)", "def stdout(self):\n pass", "def capture_print():\n\n old_streams = sys.stdout, sys.stderr\n sys.stdout = sys.stderr = io.StringIO()\n filestring = FileString(sys.stdout)\n try:\n yield filestring\n finally:\n sys.stdout, sys.stderr = old_streams\n filestring.read()", "def adv_print(*args, start='', in_file = False, **kwargs):\n max_line = kwargs.pop('max_line', False)\n print(kwargs)\n old_stdout = sys.stdout\n value = StringIO()\n sys.stdout = value\n print(*args, **kwargs)\n sys.stdout = old_stdout\n value = value.getvalue()\n value = start + value\n if max_line:\n value = value[:max_line] + '\\n' + value[max_line:]\n if in_file:\n if 'filename' in kwargs:\n filename = kwargs['filename']\n else:\n filename = 'output.txt'\n with open(filename, 'w') as f:\n f.write(value)\n print(value)", "def tprint(msg):\n sys.stdout.write(msg + '\\n')\n sys.stdout.flush()", "def tprint(msg):\n sys.stdout.write(msg + '\\n')\n sys.stdout.flush()", "def eprint(*pargs, **kargs):\n print('\\u001b[31m', end='', file=sys.stderr)\n print(*pargs, file=sys.stderr, **kargs)\n print('\\u001b[0m', end='', file=sys.stderr)", "def to_print_out(self):\n self.error_throw('output')\n\n if self.rank_method == methods_of_ranking[3]: #'diversified_ranking'\n self.output_div('print')\n else:\n self.output('print')", "def error(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def println(message, verbose_only=False):\n if verbose_only and not system.config['verbose']:\n return\n print(message)", "def test_print(chikin):\n chikin.print()", "def test_03_pass_print(self):\n print('Hello World!')", "def repl_print_statements():\n pass", "def console(self):\n fricas_console()", "def color_print(*args, **kwargs):\n file = kwargs.get('file', sys.stdout)\n\n end = kwargs.get('end', '\\n')\n\n write = file.write\n if file.isatty():\n for i in range(0, len(args), 2):\n msg = args[i]\n if i + 1 == len(args):\n color = ''\n else:\n color = args[i + 1]\n\n if color:\n msg = _color_text(msg, color)\n\n # Some file objects support writing unicode sensibly on some Python\n # versions; if this fails try creating a writer using the locale's\n # preferred encoding. 
If that fails too give up.\n if not PY3 and isinstance(msg, bytes):\n msg = _decode_preferred_encoding(msg)\n\n write = _write_with_fallback(msg, write, file)\n\n write(end)\n else:\n for i in range(0, len(args), 2):\n msg = args[i]\n if not PY3 and isinstance(msg, bytes):\n # Support decoding bytes to unicode on Python 2; use the\n # preferred encoding for the locale (which is *sometimes*\n # sensible)\n msg = _decode_preferred_encoding(msg)\n write(msg)\n write(end)", "def _print(txt):\n\n # Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.\n # Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.\n # Style: DIM, NORMAL, BRIGHT, RESET_ALL\n print('{0}{1}'.format(Style.BRIGHT + txt, Fore.RESET + Back.RESET + Style.RESET_ALL))", "def _PRINT_DEBUG(*args):\n print(sys.stderr, args, file=sys.stderr)", "def pypprint(*args, **kwargs): # type: ignore\n from typing import Iterable\n\n if len(args) != 1:\n print(*args, **kwargs)\n return\n x = args[0]\n if isinstance(x, dict):\n for k, v in x.items():\n print(f\"{k}:\", v, **kwargs)\n elif isinstance(x, Iterable) and not isinstance(x, str):\n for i in x:\n print(i, **kwargs)\n else:\n print(x, **kwargs)", "def test_perform_display_print(capsys):\n assert sync_perform(stdio_dispatcher, Effect(Display(\"foo\"))) is None\n out, err = capsys.readouterr()\n assert err == \"\"\n assert out == \"foo\\n\"", "def write(message):\n __terminalState.osSupport.print(message)", "def print_(self, s: str) -> None:", "def safeprint(message, write_to_stderr=False, newline=True):\n try:\n click.echo(message, nl=newline, err=write_to_stderr)\n except IOError as err:\n if err.errno is errno.EPIPE:\n pass\n else:\n raise", "def do_print(self, cmd):\n try:\n print(self.EvalExpression(cmd))\n except:\n pass", "def _print_custom(self):\n pass", "def _print(self, *args, **kwargs) -> None:\n # Only print in verbose mode\n if self._verbose:\n arglist = list(args)\n arglist[0] = f\"[buddy-{self._experiment_name}] {args[0]}\"\n print(*arglist, **kwargs)", "def safe_print(*objs, errors=\"replace\"):\n\tprint(*(to_stdout(str(o), errors) for o in objs))", "def output(text):\n sys.stdout.write(text)", "def do_print(self, line):\n cmd_args = io.parse_cmd_args(line, io.output_cmd_pattern)\n if cmd_args:\n success = self.manager.print_to_console(\n cmd_args.get('target'), \n cmd_args.get('filters')\n )\n if success:\n self.console_print(\"There, you asked for it!\", settings.INFO_FORMAT)\n else:\n self.console_print(\"Sorry, something kinda went wrong! 
You can try again.\", settings.ERROR_FORMAT)\n else:\n self.console_print(settings.COMMMAND_ARGS_ERROR_MSG, settings.ERROR_FORMAT)", "def bpprint(self, out=None):\n if out is None:\n out = sys.stdout\n print(self.bpformat(), file=out)", "def print_to_string(*args, text):\n print(*args)\n text.append(' '.join(args)+'\\n')", "def emu_print(text):\n print \"%s %s\" % (EMU_PRINT_PREFIX, text)", "def static_print(*args, __p=print, **kwargs):\n __p(*args, **kwargs)", "def pr(string, verbose):\n if(verbose):\n print(string)", "def stdout(msg):\n sys.stdout.write(msg)\n sys.stdout.flush()", "def display():\n\n # Check the pipe setup.\n check_pipe_setup(sequence=True, j=True)\n\n # Call the write method with sys.stdout as the file.\n write(file=sys.stdout)", "def test_capture_stdout():\n\n sys.stdout.write('Print to stdout')\n\n assert False", "def println(self, text=''):\n self.print(text)\n self.print(\"\\n\")", "def write(text, output_file=None, fg=None, bg=None): # pylint: disable=unused-argument\n if output_file is None:\n output_file = sys.stdout\n output_file.write(text)", "def _get_print_fn(file=sys.stdout):\n def _print_fn(op, xin,):\n for attr in op.attrs:\n temp = getattr(xin, attr)\n if callable(temp):\n pmsg = temp()\n else:\n pmsg = temp\n print(op.message, attr, '=', pmsg, file=file)\n return _print_fn", "def Print(self, text):\n pass", "def debugprint(obj, depth=-1, print_type=False,\r\n file=None, ids='CHAR', stop_on_name=False):\r\n if file == 'str':\r\n _file = StringIO()\r\n elif file is None:\r\n _file = sys.stdout\r\n else:\r\n _file = file\r\n done = dict()\r\n results_to_print = []\r\n order = []\r\n if isinstance(obj, gof.Variable):\r\n results_to_print.append(obj)\r\n elif isinstance(obj, gof.Apply):\r\n results_to_print.extend(obj.outputs)\r\n elif isinstance(obj, Function):\r\n results_to_print.extend(obj.maker.fgraph.outputs)\r\n order = obj.maker.fgraph.toposort()\r\n elif isinstance(obj, (list, tuple)):\r\n results_to_print.extend(obj)\r\n elif isinstance(obj, gof.FunctionGraph):\r\n results_to_print.extend(obj.outputs)\r\n order = obj.toposort()\r\n elif isinstance(obj, (int, long, float, numpy.ndarray)):\r\n print obj\r\n else:\r\n raise TypeError(\"debugprint cannot print an object of this type\", obj)\r\n for r in results_to_print:\r\n debugmode.debugprint(r, depth=depth, done=done, print_type=print_type,\r\n file=_file, order=order, ids=ids,\r\n stop_on_name=stop_on_name)\r\n if file is _file:\r\n return file\r\n elif file == 'str':\r\n return _file.getvalue()\r\n else:\r\n _file.flush()", "def _print_output(*args):\n for arg in args:\n print(arg)\n print('\\n')", "def _PrintFunc(self, obj=None, verbose=False, summarize=True, recursive=False,\n use_pager=None, to_file=None):\n if obj is not None:\n self._printed_variables.append(obj)\n lines = describe.GenerateLines(\n obj, verbose=verbose, recursive=recursive, summarize=summarize,\n format_name='text')\n _WriteToStream(lines, use_pager=use_pager, to_file=to_file)", "def _print(self, *args):\n return _ida_hexrays.qstring_printer_t__print(self, *args)", "def print_msg(*vargs, **kwargs):\n print(*vargs, **kwargs)", "def vprint(*args, **kwargs ):\n\n forceprint = False\n for key in kwargs:\n if key == \"forceprint\":\n forceprint =kwargs[key]\n \n line = ''\n if debug or forceprint : \n for arg in args:\n line += str(arg) +\" \"\n log = open(exepath + 'pyframe.log', 'a') \n log.write(line + \"\\n\")\n log.close() \n print line", "def stdout(self) -> str:\n _args: list[Arg] = []\n _ctx = self._select(\"stdout\", 
_args)\n return _ctx.execute_sync(str)", "def tprint(*args, **kwargs):\r\n tprint_worker(*args, **kwargs)", "def _write_print_mode(self):\n self.write(self.ASCII_ESC, '!', self._print_mode)", "def capture_stdout(sq, method):\n capture = io.StringIO()\n sys.stdout = capture\n if method == \"print\":\n print(sq)\n else:\n sq.display()\n sys.stdout = sys.__stdout__\n return capture", "def print(self):\n # Your implementation here" ]
[ "0.7033177", "0.6971747", "0.66082984", "0.6593919", "0.6439161", "0.64060444", "0.6342973", "0.6337713", "0.6297447", "0.625069", "0.625069", "0.625069", "0.625069", "0.625069", "0.625069", "0.6228058", "0.6203651", "0.61796016", "0.6176492", "0.60885143", "0.6076104", "0.60673904", "0.6053973", "0.6027484", "0.5978552", "0.5978552", "0.5971651", "0.5958389", "0.5939901", "0.59299034", "0.5914348", "0.5890965", "0.58739364", "0.58565474", "0.58496475", "0.58462983", "0.58337843", "0.5785186", "0.5783162", "0.57711", "0.57688445", "0.575587", "0.5741022", "0.57298625", "0.5705655", "0.57021123", "0.56777674", "0.566126", "0.5658265", "0.56527716", "0.5647781", "0.56472284", "0.5644083", "0.56216335", "0.5580733", "0.5580733", "0.5574288", "0.55722713", "0.55665827", "0.5564889", "0.5547946", "0.5542603", "0.55184054", "0.5505193", "0.54894876", "0.5484048", "0.5467635", "0.5466803", "0.54643804", "0.5463522", "0.54606295", "0.5451129", "0.5430447", "0.5413198", "0.5411748", "0.54088104", "0.54014355", "0.53899974", "0.5389908", "0.53882277", "0.53848386", "0.538365", "0.53717524", "0.5363762", "0.5361986", "0.5353057", "0.5348141", "0.5344333", "0.5343155", "0.53354686", "0.5334934", "0.5313604", "0.5312808", "0.5311302", "0.53101075", "0.53058755", "0.5295335", "0.52922446", "0.52906096", "0.52833045", "0.5279686" ]
0.0
-1
Expected defaults when no project exists
def test_no_project_defaults(self):
    ep = exposed.ExposedProject()
    self.assertIsNone(ep.display)
    self.assertIsNone(ep.shared)
    self.assertIsNone(ep.settings)
    self.assertIsNone(ep.title)
    self.assertIsNone(ep.id)
    self.assertIsNone(ep.path())
    with self.assertRaises(RuntimeError):
        ep.title = 'Some Title'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_project(self):\n pass", "def _determine_default_project(project=None):\n if project is None:\n project = _get_gcd_project()\n\n if project is None:\n project = _helpers._determine_default_project(project=project)\n\n return project", "def test_create_project(self):\n pass", "def test_create_project(self):\n pass", "def test_create_project(self):\n pass", "def project():", "def project():", "def project():", "def configure_project():\n pass", "def test_read_project(self):\n pass", "def test_read_project(self):\n pass", "def test_replace_project(self):\n pass", "def test_get_projects(self):\n pass", "def test_create_project_request(self):\n pass", "def test_add_project(self):\n pass", "def test_missing_project(self):\n task = Task({\n 'name': 'test',\n 'id': 1,\n 'stage_id' : [1, 'name'],\n 'date_deadline': False,\n 'date_start': False,\n 'date_end': False,\n 'partial_messages': [{'date':'2018-10-21 12:00:00'}],\n 'kanban_state': 'blocked',\n 'planned_hours': 100,\n 'priority': '1'\n })\n self.assertIsNotNone(task)\n self.assertEqual(task.project, 'Not assigned to project')", "def test_no_such_project(self):\n project = cd.project.get_internal_project()\n cd.project.load(None)\n\n with self.assertRaises(Exception):\n self.run_step('FAKE')\n\n cd.project.load(project)", "def project(project_no_init: Project) -> Project:\n from pdm.cli.utils import merge_dictionary\n\n data = {\n \"project\": {\n \"name\": \"test-project\",\n \"version\": \"0.0.0\",\n \"description\": \"\",\n \"authors\": [],\n \"license\": {\"text\": \"MIT\"},\n \"dependencies\": [],\n \"requires-python\": \">=3.7\",\n },\n \"build-system\": DEFAULT_BACKEND.build_system(),\n }\n\n merge_dictionary(project_no_init.pyproject._data, data)\n project_no_init.pyproject.write()\n # Clean the cached property\n project_no_init._environment = None\n return project_no_init", "def init_project(self,project_name,project_dir):\n projectkey = id_generator(10)\n if \"towercrane\" not in os.listdir(project_dir):\n print(f'Initializing project:\"{project_name}\" with projectkey: \"{projectkey}\" ')\n self.TowercraneConfig = {\"project_name\":project_name,\n \"projectkey\":projectkey,\n \"publicurl\":\"private_project\"\n }\n write_config(project_dir,self.TowercraneConfig)\n project_insert_report = self.db.create_project(project_name,project_dir,projectkey)\n print(project_insert_report)\n \n elif \"towercrane\" in os.listdir(project_dir):\n self.TowercraneConfig = read_config(project_dir)\n print(f'project:\"{self.TowercraneConfig[\"project_name\"]}\" with projectkey: \"{self.TowercraneConfig[\"projectkey\"]}\" Already Exists')", "def project_default(tmp_path):\n from nitpick.constants import NITPICK_STYLE_TOML\n from tests.helpers import ProjectMock, tomlstring\n\n nitpick_style = Path(__file__).parent.parent / NITPICK_STYLE_TOML\n return ProjectMock(tmp_path).pyproject_toml(\n f\"\"\"\n [tool.nitpick]\n style = {tomlstring(nitpick_style)}\n \"\"\"\n )", "def test_patch_project(self):\n pass", "def init():\n defaults = _project_defaults()\n\n if Project.prompt:\n defaults['name'] = prompt(\"Enter the project's name:\", defaults['name'])\n defaults['package'] = prompt(\"Enter the project's package:\", defaults['package'])\n defaults['author'] = prompt(\"Enter the project's author:\", defaults['author'])\n defaults['author_email'] = prompt(\"Enter the project's author's email:\", defaults['author_email'])\n defaults['description'] = prompt(\"Enter the project's description:\", defaults['description'])\n\n # 
print(\"defaults:\\n{defaults}\".format(defaults=pformat(defaults)))\n\n if Project.use_templates:\n\n template = Template()\n\n for template_dir in [os.path.abspath(os.path.join(herringlib, 'herringlib', 'templates'))\n for herringlib in HerringFile.herringlib_paths]:\n\n info(\"template directory: %s\" % template_dir)\n # noinspection PyArgumentEqualDefault\n template.generate(template_dir, defaults, overwrite=False)", "def test_list_project(self):\n pass", "def default_context(project_name: str) -> None:\n return BuilderContext(\n project_name=project_name,\n kube_name=project_name.replace(\"_\", \"-\"),\n project_description=\"Generated by pytest.\",\n ci_type=CIType.none,\n db=DatabaseType.none,\n db_info=DB_INFO[DatabaseType.none],\n enable_redis=False,\n enable_migrations=False,\n enable_kube=False,\n enable_routers=True,\n add_dummy=False,\n self_hosted_swagger=False,\n force=True,\n )", "def test_no_project_id(self):\n\n self.assertRaises(Exception, kaput.init, 'abc', None)", "def get_project(con):\n try:\n return con.project_read(fq_name=conf.get('default_project', 'UNEXPECTED_VALUE'))\n except:\n log.debug('Unable to find project default-domain, admin:', exc_info=True)\n return None", "def test_not_github(self):\n project_src_path = 'project-src'\n os.environ['PROJECT_SRC_PATH'] = project_src_path\n generic_ci_env = platform_config.BasePlatformConfig()\n self.assertEqual(generic_ci_env.project_src_path, project_src_path)", "def test_default_init_parameters(isolated_runner, mocker, project_init, template):\n create_from_template = mocker.patch(\"renku.core.commands.init.create_from_template\")\n mocker.patch(\"renku.cli.githooks.install\")\n\n data, commands = project_init\n\n new_project = Path(data[\"test_project\"])\n assert not new_project.exists()\n result = isolated_runner.invoke(cli, commands[\"init_test\"] + commands[\"id\"], commands[\"confirm\"])\n assert 0 == result.exit_code, format_result_exception(result)\n create_from_template.assert_called_once()\n metadata = create_from_template.call_args[1][\"metadata\"]\n assert {\n \"__template_source__\",\n \"__template_ref__\",\n \"__template_id__\",\n \"__namespace__\",\n \"__repository__\",\n \"__project_slug__\",\n \"__sanitized_project_name__\",\n } <= set(metadata.keys())\n assert metadata[\"__template_source__\"] == \"renku\"\n assert metadata[\"__template_ref__\"] is None\n assert metadata[\"__template_id__\"] == template[\"id\"]\n assert metadata[\"__namespace__\"] == \"\"\n assert metadata[\"__repository__\"] == \"\"\n assert metadata[\"__project_slug__\"] == \"\"\n assert metadata[\"__sanitized_project_name__\"] == \"\"", "def test_get_project(self):\n self.assertEqual(self.remote_project.get_project(), self.project)", "def test_set_project_default_power_schedule(self):\n pass", "def testNeedProjectSetup(self, mock_ans):\n # Test need project setup.\n self.gcp_env_runner.project = \"\"\n self.gcp_env_runner.zone = \"\"\n self.assertTrue(self.gcp_env_runner._NeedProjectSetup())\n # Test no need project setup and get user's answer.\n self.gcp_env_runner.project = \"test_project\"\n self.gcp_env_runner.zone = \"test_zone\"\n self.gcp_env_runner._NeedProjectSetup()\n mock_ans.assert_called_once()", "def _create_dummy_project(self,projectname=\"testproject\"):\n # Create three types of users that exist: Root, can do anything, \n # projectadmin, cam do things to a project he or she owns. 
And logged in\n # user \n \n #created in _create_main_project_and_root.\n root = self.root\n # non-root users are created as if they signed up through the project, \n # to maximize test coverage. \n \n # A user who has created a project\n projectadmin = self._create_random_user(\"projectadmin_\")\n \n testproject = self._create_comicsite_in_admin(projectadmin,projectname)\n create_page_in_admin(testproject,\"testpage1\")\n create_page_in_admin(testproject,\"testpage2\")\n \n # a user who explicitly signed up to testproject\n participant = self._create_random_user(\"participant_\")\n self._register(participant,testproject)\n \n # a user who only signed up but did not register to any project\n registered_user = self._create_random_user(\"comicregistered_\")\n \n #TODO: How to do this gracefully? \n return [testproject,root,projectadmin,participant,registered_user]", "def GetProject(args):\n return args.project or properties.VALUES.core.project.GetOrFail()", "def test_list_projects(self):\n pass", "def test_list_projects(self):\n pass", "def getProjectName():", "def test_set_project_itar_information(self):\n pass", "def test_project_unspecified(self):\n key = self.create_entity(None, 'name', ['key:value'])\n urls = snapshots.fetch(key)\n self.failIf(urls)", "def setup_project(project_name):\n\n project_arn = ''\n for project in device_farm.list_projects()['projects']:\n if project['name'] == project_name:\n print('{} project already exists'.format(project_name))\n project_arn = project['arn']\n else:\n print(\n '{} project is not available, creating new one'.format(\n project_name\n )\n )\n project_arn = create_project(project_name)\n\n return project_arn\n\n raise KeyError('Problem finding project %r' % project_name)", "def startproject(self):\n\n path = os.path.join(self.path, self.project_name)\n if os.path.exists(path):\n raise exceptions.ProjectDirectoryAlreadyExistsError(self.project_name)\n else:\n os.makedirs(path)\n\n context = {\n 'project_name': self.project_name,\n 'default_region': self.region,\n 'random': hashlib.sha1(six.text_type(random.random()).encode('utf-8')).hexdigest()[:8]\n }\n\n self._clone_defaults(\n os.path.join(self.root, 'defaults', 'project'),\n path,\n context\n )", "def get_project(arn=None):\n pass", "def test_empty_projects(self, client, site, homepage):\n response = client.get(homepage.relative_url(site))\n assertTemplateNotUsed(response, \"projects/snippets/project_card.html\")", "def test_default_context():\n rally = Rally(server=RALLY, user=RALLY_USER, password=RALLY_PSWD, server_ping=False)\n context1 = rally.contextHelper.currentContext()\n workspace = rally.getWorkspace()\n project = rally.getProject()\n context2 = rally.contextHelper.currentContext()\n assert context1 == context2\n assert context1.workspace == DEFAULT_WORKSPACE\n assert workspace.Name == DEFAULT_WORKSPACE\n assert context1.project == DEFAULT_PROJECT\n assert project.Name == DEFAULT_PROJECT\n url = makeResourceUrl(rally, 'Defect')\n #print(url)\n expected_workspace_clause = 'workspace=workspace/%s' % str(workspace.oid)\n assert expected_workspace_clause in url\n expected_project_clause = 'project=project/%s' % str(project.oid)\n assert expected_project_clause in url", "def test_get_projects_throws_if_project_does_not_exist(fc: fetcher.Fetcher):\n with pytest.raises(exceptions.NotFoundError) as exc:\n fc.get_projects(\"BadProject\")\n assert \"An error occured while getting projects.\" in str(exc.value)", "def test_get_default_settings_path():\n\n root_path = 
application_services.get_pyrin_main_package_path()\n default_settings_path = os.path.abspath(os.path.join(root_path, 'settings', 'default'))\n assert application_services.get_default_settings_path() == default_settings_path", "def test_config_get(self):\n test_name = sys._getframe().f_code.co_name\n self.env.config.set('project', 'name', 'Test project')\n rv, output = self._execute('config get project name')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def make_default_config(project):\n return {\n \"breathe_projects\": {\n project: \"./_doxygen/xml\"\n },\n \"breathe_default_project\": project,\n \"exhale_args\": {\n # required arguments\n \"containmentFolder\": \"./api\",\n \"rootFileName\": \"{0}_root.rst\".format(project),\n \"rootFileTitle\": \"``{0}`` Test Project\".format(project),\n \"doxygenStripFromPath\": \"..\",\n # additional arguments\n \"exhaleExecutesDoxygen\": True,\n \"exhaleDoxygenStdin\": \"INPUT = ../include\"\n }\n }", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\"/api/v2/projects/999/queries/aoi/\")\n self.assertEqual(response.status_code, 404)", "def test_not_authed_public_project(self):\n # Clear out existing project with ID=1 if necessary.\n Project.objects.filter(id=1).delete()\n locale = LocaleFactory.create(code='fakelocale')\n project = ProjectFactory.create(id=1, slug='valid-project', locales=[locale])\n ResourceFactory.create(project=project)\n\n response = self.client.get('/fakelocale/valid-project/')\n assert_equal(response.status_code, 200)\n # I'd assertTemplateUsed here but it doesn't work on non-DTL\n # templates.", "def test_defaults():\n config = Config(\n env_var='DO_NOT_USE',\n env_prefix='DO_NOT_USE',\n entry_point_name='DO_NOT_USE',\n )\n\n assert not config.keys()", "def project_no_init(\n tmp_path: Path,\n mocker: MockerFixture,\n core: Core,\n pdm_session: type[PDMSession],\n monkeypatch: pytest.MonkeyPatch,\n build_env: Path,\n) -> Project:\n test_home = tmp_path / \".pdm-home\"\n test_home.mkdir(parents=True)\n test_home.joinpath(\"config.toml\").write_text(\n '[global_project]\\npath = \"{}\"\\n'.format(test_home.joinpath(\"global-project\").as_posix())\n )\n p = core.create_project(tmp_path, global_config=test_home.joinpath(\"config.toml\").as_posix())\n p.global_config[\"venv.location\"] = str(tmp_path / \"venvs\")\n mocker.patch.object(BaseEnvironment, \"_build_session\", pdm_session)\n mocker.patch(\"pdm.builders.base.EnvBuilder.get_shared_env\", return_value=str(build_env))\n tmp_path.joinpath(\"caches\").mkdir(parents=True)\n p.global_config[\"cache_dir\"] = tmp_path.joinpath(\"caches\").as_posix()\n python_path = find_python_in_path(sys.base_prefix)\n if python_path is None:\n raise ValueError(\"Unable to find a Python path\")\n p._saved_python = python_path.as_posix()\n monkeypatch.delenv(\"VIRTUAL_ENV\", raising=False)\n monkeypatch.delenv(\"CONDA_PREFIX\", raising=False)\n monkeypatch.delenv(\"PEP582_PACKAGES\", raising=False)\n monkeypatch.delenv(\"NO_SITE_PACKAGES\", raising=False)\n pythonpath = os.getenv(\"PYTHONPATH\", \"\")\n pythonpath = remove_pep582_path_from_pythonpath(pythonpath)\n if pythonpath:\n monkeypatch.setenv(\"PYTHONPATH\", pythonpath)\n return p", "def test_load_no_project():\n\n assert_raises(Exception, inventory.load, PROJECT_NAME)", "def test_owner(self):\n self.assertIsNone(self.env.project_repo_owner)", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\n 
\"/api/v2/projects/999/queries/notasks/\",\n headers={\"Authorization\": self.user_session_token},\n )\n self.assertEqual(response.status_code, 404)", "def __init__(__self__, *,\n project: Optional[pulumi.Input[str]] = None):\n if project is not None:\n pulumi.set(__self__, \"project\", project)", "def test_add_trusted_project(self):\n pass", "def initilize(self):\n if not self.project_path.exists():\n self.project_path.mkdir()", "def test_update_project(self):\n pass", "def test_update_project(self):\n pass", "def test_set_project_default_virtualization_realm(self):\n pass", "def test_not_authed_nonpublic_project(self):\n # Clear out existing project with ID=1 if necessary.\n Project.objects.filter(id=2).delete()\n locale = LocaleFactory.create(code='fakelocale')\n project = ProjectFactory.create(id=2, slug='valid-project', locales=[locale])\n ResourceFactory.create(project=project)\n\n response = self.client.get('/fakelocale/valid-project/')\n assert_redirects(response, reverse('pontoon.home'))\n assert_equal(self.client.session['translate_error'], {'redirect': '/fakelocale/valid-project/'})", "def test_retrieve_project_unset(self):\n setting_name = 'project_str_setting'\n default_value = app_settings.get_default(EX_APP_NAME, setting_name)\n q_kwargs = {\n 'app_plugin__name': EX_APP_NAME,\n 'name': setting_name,\n 'project': self.project,\n }\n AppSetting.objects.get(**q_kwargs).delete()\n\n url = reverse(\n 'projectroles:api_project_setting_retrieve',\n kwargs={'project': self.project.sodar_uuid},\n )\n get_data = {'app_name': EX_APP_NAME, 'setting_name': setting_name}\n response = self.request_knox(url, data=get_data)\n\n self.assertEqual(response.status_code, 200, msg=response.content)\n response_data = json.loads(response.content)\n expected = {\n 'app_name': EX_APP_NAME,\n 'project': str(self.project.sodar_uuid),\n 'user': None,\n 'name': setting_name,\n 'type': 'STRING',\n 'value': default_value,\n 'user_modifiable': True,\n }\n self.assertEqual(response_data, expected)\n self.assertIsInstance(AppSetting.objects.get(**q_kwargs), AppSetting)", "def test_list_project_request(self):\n pass", "def test_set_project_no_user(self):\n setting_name = 'project_user_str_setting'\n url = reverse(\n 'projectroles:api_project_setting_set',\n kwargs={'project': self.project.sodar_uuid},\n )\n post_data = {\n 'app_name': EX_APP_NAME,\n 'setting_name': setting_name,\n 'value': 'value',\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(AppSetting.objects.count(), 0)", "def test_get_source_site(self):\n self.assertEqual(self.project.get_source_site(), None)", "def test_empty_project_create(self):\n\n responses.add(\n responses.POST,\n self.host + \"/manager\",\n json={'message': \"Project name cannot be empty.\", 'status':\"error\"},\n status=200\n )\n\n with self.assertRaises(CreateError):\n self.azk.create('', 'description')", "def test_good_projects(self):\n # name path main_lang\n self.do_test_good('bar', 'tmp/bunny', 'py')\n self.do_test_good('banana', 'tmp/frog', 'c')\n self.do_test_good('grinch', 'tmp/abc/def')\n self.do_test_good('grinch', 'tmp/pqr')", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\n \"/api/v2/projects/queries/999/similar-projects/\",\n headers={\"Authorization\": self.user_session_token},\n )\n self.assertEqual(response.status_code, 404)", "def get_defaults():\n\n # get package defaults\n with 
open(os.path.join(iLoop_RNAseq_pipeline.__path__[0], 'defaults', 'RNAseq_pipeline_defaults.txt')) as rpd:\n defaults = {}\n for line in rpd.readlines():\n if line.strip():\n defaults[line.split(',')[0].strip()] = line.split(',')[1].strip()\n\n try:\n with open(os.path.join(os.path.expanduser(\"~\"), 'RNAseq_pipeline_defaults.txt')) as rpd:\n for line in rpd.readlines():\n if line.strip():\n defaults[line.split(',')[0].strip()] = line.split(',')[1].strip()\n except FileNotFoundError:\n logger.warning('\"RNAseq_pipeline_defaults.txt\" does not exist under home path. An email address and project ID should be should be define in that file.')\n\n # replace with user defaults\n try:\n with open('RNAseq_pipeline_defaults.txt') as rpd:\n for line in rpd.readlines():\n if line.strip():\n defaults[line.split(',')[0].strip()] = line.split(',')[1].strip()\n except FileNotFoundError:\n logger.info(\n '\"RNAseq_pipeline_defaults.txt\" does not exist under this folder. Defaults from the package and home path will be used.')\n\n if 'email' not in defaults:\n if not validate_email(defaults['email']):\n while True:\n email = input('Enter a valid email address for job status: \\n')\n if validate_email(email):\n defaults['email'] = email\n print('Writing email to \"RNAseq_pipeline_defaults.txt\" under home path.')\n f = open(os.path.join(os.path.expanduser(\"~\"), 'RNAseq_pipeline_defaults.txt'), 'w+')\n f.write('\\nemail,{}'.format(email))\n f.close()\n break\n else:\n print('{} is not valid, try again.'.format(email))\n\n if ('project' not in defaults) or (defaults['project'] == 'projectid'):\n project = input('Enter Computerome project ID for billing: \\n')\n # TODO It is possible to validate this by checking folder name under \"/home/projects\".\n defaults['project'] = project\n print('Writing project ID to \"RNAseq_pipeline_defaults.txt\" under home path.')\n f = open(os.path.join(os.path.expanduser(\"~\"), 'RNAseq_pipeline_defaults.txt'), 'w+')\n f.write('\\nproject,{}'.format(project))\n f.close()\n\n return defaults", "def test_create_project_unknown_user(self):\n self.assertEqual(Project.objects.count(), 2)\n url = reverse('projectroles:api_project_create')\n post_data = {\n 'title': NEW_PROJECT_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': 'description',\n 'readme': 'readme',\n 'public_guest_access': False,\n 'owner': INVALID_UUID,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 400)\n self.assertEqual(Project.objects.count(), 2)", "def fetch_and_validate_project(\n launch_project: LaunchProject, api: Api\n) -> LaunchProject:\n if launch_project.source == LaunchSource.DOCKER:\n return launch_project\n if launch_project.source == LaunchSource.LOCAL:\n if not launch_project._entry_points:\n wandb.termlog(\n f\"{LOG_PREFIX}Entry point for repo not specified, defaulting to `python main.py`\"\n )\n launch_project.add_entry_point([\"python\", \"main.py\"])\n elif launch_project.source == LaunchSource.JOB:\n launch_project._fetch_job()\n else:\n launch_project._fetch_project_local(internal_api=api)\n\n assert launch_project.project_dir is not None\n # this prioritizes pip, and we don't support any cases where both are present\n # conda projects when uploaded to wandb become pip projects via requirements.frozen.txt, wandb doesn't preserve conda envs\n if os.path.exists(\n os.path.join(launch_project.project_dir, \"requirements.txt\")\n ) or os.path.exists(\n 
os.path.join(launch_project.project_dir, \"requirements.frozen.txt\")\n ):\n launch_project.deps_type = \"pip\"\n elif os.path.exists(os.path.join(launch_project.project_dir, \"environment.yml\")):\n launch_project.deps_type = \"conda\"\n\n return launch_project", "def test_project_list_with_no_projects(self):\n response = self.client.get(reverse('portfolio:project_list'))\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, 'There are no portfolio projects.')", "def test_search_project(self):\n title = Project.search_project(\"dee\")\n self.assertTrue(len(title) > 0)", "def test_no_such_project(self):\n def mock_send_request(*args, **kwargs):\n return Response().update(\n remote_source_directory=directory\n ).response\n\n directory = os.path.dirname(os.path.realpath(__file__))\n response = support.run_remote_command(\n command='sync',\n mock_send_request=mock_send_request\n )\n self.assert_has_error_code(response, 'NO_PROJECT')", "def test_create_project_from_template(self):\n project_new = self.project_template.take_template()\n\n self.assertTrue(project_new)", "def setUp_base(self):\n self._create_main_project_and_root()", "def test_empty_repository(self):\n os.environ['REPOSITORY'] = ''\n self.assertEqual(self.env.project_repo_name, '')", "def test_parser_init_and_without_project_type(parser):\n with pytest.raises(SystemExit):\n parser.parse_args(['--init'])", "def test_simple_alias_to_project(self):\n\n # Setup aliases\n self.t.config(\"alias.foo\", \"_projects\")\n self.t.config(\"alias.bar\", \"foo\")\n self.t.config(\"alias.baz\", \"bar\")\n self.t.config(\"alias.qux\", \"baz\")\n\n # Setup a task with dummy project called Home\n expected = \"Home\"\n self.t(\"add project:{0} foo\".format(expected))\n\n # Sanity check that _projects command outputs the \"Home\" project\n code, out, err = self.t(\"_projects\")\n self.assertIn(expected, out,\n msg=\"task _projects -> Home\")\n\n # Check that foo command outputs the \"Home\" project\n code, out, err = self.t(\"foo\")\n self.assertIn(expected, out,\n msg=\"task foo -> _projects > Home\")\n\n # Check that bar command outputs the \"Home\" project\n code, out, err = self.t(\"bar\")\n self.assertIn(expected, out,\n msg=\"task bar -> foo > _projects > Home\")\n\n # Check that baz command outputs the \"Home\" project\n code, out, err = self.t(\"baz\")\n self.assertIn(expected, out,\n msg=\"task baz -> bar > foo > _projects > Home\")\n\n # Check that qux command outputs the \"Home\" project\n code, out, err = self.t(\"qux\")\n self.assertIn(expected, out,\n msg=\"task qux -> baz > bar > foo > _projects > Home\")", "def create_project(name=None, defaultJobTimeoutMinutes=None):\n pass", "def parse_no_project(self, args):\n worktree = self.linguist_worktree.worktree\n parser = qisys.parsers.WorkTreeProjectParser(worktree)\n worktree_projects = parser.parse_no_project(args)\n if not worktree_projects:\n raise CouldNotGuessProjectName()\n # WorkTreeProjectParser returns None or a list of one element\n worktree_project = worktree_projects[0]\n linguist_project = new_linguist_project(self.linguist_worktree, worktree_project)\n if not linguist_project:\n raise CouldNotGuessProjectName()\n return self.parse_one_project(args, linguist_project.name)", "def atlas_projects():\n pass", "def test_no_setting(self):\n with self.assertRaises(ImproperlyConfigured):\n import_from_setting('DOES_NOT_EXIST')", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = 
self.client.get(\"/api/v2/projects/999/queries/summary/\")\n self.assertEqual(response.status_code, 404)", "def getProjectURL():", "def random_project(**overrides) -> Dict[str, Any]:\n data = dict(\n uuid=fake.uuid4(),\n name=fake.word(),\n description=fake.sentence(),\n prj_owner=fake.pyint(),\n thumbnail=fake.image_url(width=120, height=120),\n access_rights={},\n workbench={},\n published=False,\n )\n data.update(overrides)\n return data", "def test_create_project_target_enabled(self):\n self.assertEqual(Project.objects.count(), 2)\n url = reverse('projectroles:api_project_create')\n post_data = {\n 'title': NEW_PROJECT_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': 'description',\n 'readme': 'readme',\n 'public_guest_access': False,\n 'owner': str(self.user.sodar_uuid),\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 201, msg=response.content)\n self.assertEqual(Project.objects.count(), 3)", "def __project(uri):\n uri = uri.lower().split('/')[-1].split('_')[0]\n project = {\n 'as': \"ICOS\",\n 'es': \"ICOS\",\n 'os': \"ICOS\",\n 'neon': 'NEON',\n 'ingos': 'INGOS',\n 'fluxnet': 'FLUXNET'\n }\n\n if uri in project:\n return project.get(uri)\n else:\n return 'other'", "def _create_main_project_and_root(self): \n if len(ComicSite.objects.filter(short_name=settings.MAIN_PROJECT_NAME)) == 0:\n main = ComicSite.objects.create(short_name=settings.MAIN_PROJECT_NAME,\n description=\"main project, autocreated by comicframeworkTestCase._create_inital_project()\",\n skin=\"fakeskin.css\"\n )\n \n main.save()\n \n try:\n self.root = User.objects.get(username='root')\n except ObjectDoesNotExist:\n # A user who has created a project\n root = User.objects.create_user('root',\n '[email protected]',\n 'testpassword') \n root.is_staff = True\n root.is_superuser = True\n root.save()\n \n self.root = root\n\n call_command('check_permissions')", "def test_missing_inputs(self):\n pp = ProjectParser()\n project = \"\"\"file://result <- file://file1, file://README.md\"\"\"\n pp.set_project(project)\n workflow = pp.parse_project()\n missing = workflow.missing_inputs()\n assert len(missing) == 1\n assert missing[0].url == \"file://file1\"", "def returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\"/api/v2/projects/999/queries/priority-areas/\")\n self.assertEqual(response.status_code, 404)", "def project_presets(project):\n\n return None", "def test_project_path(self):\n\n # Without arguments\n project_root_path = os.path.abspath(os.path.join(\n MY_DIRECTORY, '..', '..'\n ))\n self.assertEqual(project_root_path, paths.project())\n\n # With arguments\n self.assertEqual(\n MY_PATH,\n paths.project('tracksim', 'tests', 'test_tracksim.py')\n )", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")" ]
[ "0.7302375", "0.69367903", "0.6899856", "0.6899856", "0.6899856", "0.6896533", "0.6896533", "0.6896533", "0.68203735", "0.6759572", "0.6759572", "0.67091924", "0.6589286", "0.65550405", "0.6552097", "0.65425485", "0.6540739", "0.6533683", "0.63593394", "0.6352195", "0.6341002", "0.63120073", "0.6301329", "0.6293345", "0.6279331", "0.6270914", "0.62369347", "0.61655974", "0.615675", "0.6149929", "0.61094135", "0.6104614", "0.60972834", "0.6093082", "0.6093082", "0.60715055", "0.6065952", "0.60561764", "0.6053298", "0.6048049", "0.60439575", "0.60404426", "0.60240024", "0.6022733", "0.60093683", "0.6004599", "0.6004161", "0.59956515", "0.5992463", "0.5984208", "0.5981035", "0.5979965", "0.5971739", "0.59710413", "0.5969964", "0.5967577", "0.59496033", "0.5948133", "0.5948133", "0.59349513", "0.5922349", "0.5917052", "0.5906004", "0.5888871", "0.5886731", "0.5873298", "0.5873019", "0.58513546", "0.5835387", "0.58219737", "0.58150333", "0.5805532", "0.57955724", "0.579465", "0.57904816", "0.5779997", "0.57628703", "0.57603437", "0.57552904", "0.5753894", "0.5741438", "0.5739465", "0.57273287", "0.57212746", "0.5720612", "0.57200015", "0.57195973", "0.57157475", "0.57146186", "0.571073", "0.5708498", "0.5700178", "0.5699274", "0.56973666", "0.56973666", "0.56973666", "0.56973666", "0.56973666", "0.56973666", "0.56973666" ]
0.7610206
0
Should return values from the internal _step object.
def test_step_properties(self, _step: PropertyMock):
    now = datetime.utcnow()
    _step.return_value = MagicMock(
        start_time=now,
        end_time=now,
        elapsed_time=0,
        is_visible=True
    )
    es = exposed.ExposedStep()
    self.assertEqual(now, es.start_time)
    self.assertEqual(now, es.end_time)
    self.assertEqual(0, es.elapsed_time)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getSteps():", "def _step(self) -> None:", "def _get_steps(self):\n return self.steps", "def get_steps(self):\n return self.steps", "def step_values(self):\n return self._get_values().copy()", "def _step(self):\n pass", "def step ( self ) :\n return self.__step", "def step(self):\n return self._step", "def getCurrentStep():", "def value(self, step):\n raise NotImplementedError", "def step(self):\r\n raise NotImplementedError", "def get_steps(self):\n return self.steps", "def step(self):\n raise NotImplementedError", "def do_step(self) -> None:", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n\n pass", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def get_step(self):\n return self.step", "def get_step(self):\n return self.step", "def step_env(self):\n raise NotImplementedError\n # Not needed for this homework", "def getStep():\n # TODO: can there be non-Step logs?", "def step_forward(self):", "def getStep(self, *args):\n return _CompuCell.Simulator_getStep(self, *args)", "def _step(self, a):\n obs, rew, done, info = super()._step(a)\n # if self.robot.body_xyz[0] > self.threshold:\n # rew = 1.0\n # self.threshold += 1\n # else:\n # rew = 0.0\n # self.steps += 1\n # if self.steps > self.max_episode_steps:\n # done = True\n return obs, rew, done, info", "def step(self, state):", "def step(self):\n value = self.current_event[\"step\"][\"value\"]\n self.current_value.append(value)", "def step(self, step, observation, **extra_feed):\n extra_feed['act_step'] = step\n a, v, state, neglogp = self._evaluate([self.act_action, self.vf, self.state, self.act_neglogp], observation, **extra_feed)\n if state.size == 0:\n state = None\n return a, v, state, neglogp", "def train(self, steps):\r\n for e in range(steps):\r\n # do something...\r\n pass\r\n return self.get_value_function()", "def GetTimestepValues(self):\n if self.__timesteps is None: self.__timesteps = self.__SetInputTimesteps()\n # self.__timesteps should already be of type list\n return self.__timesteps if self.__timesteps is not None else None", "def step(self,\n actn: int) -> Tuple[np.array, float, bool]:\n raise NotImplementedError", "def raw_steps(self):\n return self.obj_payload[\"steps\"]", "def getSteps( self ):\n\n return self.adb.get( 'steps' )", "def _step(self, a):\n obs, rew, done, info = super()._step(a)\n # rew = +1 if past int threshold for first time in episode\n # if self.robot.body_xyz[0] > self.threshold:\n # self.threshold += 1\n # rew = 1.0\n # else:\n # rew = 0.0\n # self.steps += 1\n # if self.steps > self.max_episode_steps:\n # done = True\n return obs, rew, done, info", "def __call__(self, new_val, previous_val, step):\n\t\treturn", "def total_steps(self) -> global___Expression:", "def _step(self, whence):\n pass", "def step(self,inp): ## function responsible for exciting the machine with a SINGLE INPUT VALUE\n (s, o) = self.getNextValues(self.state,inp)\n # will store the state and return the output\n self.state =s\n return o", "def step(self, **kwargs):\n pass", "def GetStepTaken(self):\n return _gmat_py.Propagator_GetStepTaken(self)", "def step(self, observation):\n action, value = self(observation)\n\n return {\n 'actions': action,\n 'values': value\n }", "def get_time_step_values(self):\n return DiscretizeMeshReader.get_time_step_values(self)", "def get_step(self):\n # decide which 
direction and how far\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def step(self, step=None):\n pass", "def _step(self) -> int:\n return self._config[CONF_STEP]", "def step(\n self, actions: ActionDict\n ) -> tuple[\n ObsDict, dict[str, float], dict[str, bool], dict[str, bool], dict[str, dict]\n ]:\n raise NotImplementedError", "def _setVals(self, step=0):\n self.step = step", "def get_time_step_values(self):\n if self.need_to_read():\n self._read_up_front()\n return self._timesteps if self._timesteps is not None else None", "def state_step(self) -> float:\n raise NotImplementedError", "def _prey_step(self):\n raise NotImplementedError()", "def step(self):\n rtn = self.loc\n for i in range(0,self.space):\n rtn = self._step(rtn)\n\n self.steps += 1\n self.loc = np.copy(rtn) # necessary?\n\n if self.record_steps:\n self.history = np.concatenate((self.history, [rtn]), axis=0)\n\n assert(self.history.shape == (self.steps, self.dim))\n\n return rtn", "def previous_step_result(self):\n return self._previous_step_result", "def step(self):\n return _uhd_swig.meta_range_t_step(self)", "def get_step(self) -> int:\n return self.step", "def cur_step(self):\n return self._cur_step", "def get_time_step_values(self):\n return OcTreeReader.get_time_step_values(self)", "def get_time_step_values(self):\n return GravGradReader.get_time_step_values(self)", "def get_step(self):\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def get_step(self):\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def get_time_step_values(self):\n return TensorMeshReader.get_time_step_values(self)", "def get_time_step_values(self):\n return GravObsReader.get_time_step_values(self)", "def get_steps_num():\n return 0", "def value_steps(self, steps):\n return self.initial_value * self.schedule(steps / self.nvalues)", "def step_points(self) -> np.array:\n if self._data is None:\n return np.array([])\n return self._data.index.values", "def do_steps(self):\n steps = self.get_step_conf()\n all_step_config = dict()\n for k, v in steps.items():\n tmp_list = list()\n all_step_config[k] = tmp_list\n start = v[\"Start Value\"]\n end = v[\"End Value\"]\n # special handling of edge length\n if(k == \"Edge Length\"):\n start = self.convert_to_tuple(start)\n end = self.convert_to_tuple(end)\n tmp_list.append(str(start))\n while(start != end):\n start = self.add_edge_length(\n start, self.convert_to_tuple(v[\"Step\"]))\n tmp_list.append(str(start))\n print start\n else:\n tmp_list.append(float(start))\n while float(start) < float(end):\n start = float(start) + float(v[\"Step\"])\n tmp_list.append(start)\n return all_step_config", "def eval_step(self, *args, **kwargs):\n raise NotImplementedError", "def get_time_step_values(self):\n return TopoReader.get_time_step_values(self)", "def steps(self):\n for step in self._steps:\n yield step", "def step(self, observation: dict) -> dict:\n raise NotImplementedError(\"step\")", "def step(self) -> int:\n return self._step", "def step(self, action: np.ndarray) -> Tuple[np.ndarray, np.float64, bool, dict]:\n next_state, reward, done, info = self.env.step(action)\n return next_state, reward, done, info", "def Step(self, *args):\n return _gmat_py.Propagator_Step(self, *args)", "def _step(self, a):\n state, rew, done, info = super()._step(a)\n render = self.get_render_obs()\n return render, sum(self.rewards), 
bool(done), {}", "def _e_step(self, x):\n return self.get_posterior(x)", "def _get_next_point(self):\n #Get the index of the current step in each dimension\n nparams = len(self.transform.get_params())\n indices = [0]*nparams\n #Get the number of steps in each dimension\n lengths = [len(self.steps[i]) for i in range(nparams)]\n\n end = False\n while not end:\n yield [self.steps[i][indices[i]] for i in range(nparams)]\n\n #Increment the index of the last paramenter and then check whether it goes over the end\n indices[-1] += 1\n for p in reversed(range(nparams)):\n if indices[p] == lengths[p]:\n indices[p] = 0\n if p > 0:\n indices[p-1] += 1\n else:\n end = True", "def step(self, inp):\n nState, output = self.getNextValues(self.currState, inp)\n self.currState = nState\n return output", "def take_step(self):\n choices_of_steps = [(0,1), (1,0), (0,-1), (-1,0)]\n return random.choices(choices_of_steps)[0]", "def step(self, move):", "def _sample_steps(self):\n mixture_size = self.parameters['fixed_mixture_size']\n if not self.is_correlated_mixture and mixture_size is None:\n return self.get_steps('monte_carlo')\n else:\n return self.get_steps('metropolis')", "def step(self, action):\n (self.state, self.reward, self.terminal, self.truncated,\n self.info) = self.env.step(action)\n\n return self.state, self.reward, self.terminal, self.truncated, self.info", "def get_time_step_values(self):\n return MagObsReader.get_time_step_values(self)", "def perform_step(self) -> None:\n pass", "def get_step(self):\n direction = choice([1,-1])\n direction = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def steps(self) -> pulumi.Output[Sequence['outputs.StepResponse']]:\n return pulumi.get(self, \"steps\")", "def process_step(self, observation, reward, done, info):\n observation = self.process_observation(observation)\n reward = self.process_reward(reward)\n info = self.process_info(info)\n return observation, reward, done, info", "def __getitem__( self, stepNum ):\n assert isinstance( stepNum, int )\n\n assert isinstance( self._env, Env )\n assert isinstance( self._steps, list )\n\n return self._steps[ stepNum - 1 ]", "def step(self):\n if self.dynamic:\n self._update_db_obj()\n return self._db_obj.step", "def _get_step_price(self):\n return self._step_counter_factory.get_step_price()", "def get_time_step_values(self):\n return TensorMeshAppender.get_time_step_values(self)", "def horde_step(self, observation):", "def step(\n self,\n actions,\n ) -> Tuple[\"next_state\", \"reward\", \"done\", \"env_info\"]:\n env_info = self.env.step(actions)[self.brain_name]\n next_states = env_info.vector_observations\n rewards = env_info.rewards\n dones = env_info.local_done\n return (next_states, rewards, dones, env_info)", "def _step(self, action: types.NestedArray) -> ts.TimeStep:", "def step(self, action):\n obs, r, done, info = self.env.step(action)\n obs = self.get_observation(obs)\n return obs, r, self.is_done(), info", "def get_view_steps(self):\n return self._data_dict[self.KEY_VIEW_STEPS]", "def record(self, step):", "def expansion_steps(self):\n return self._p", "def best_step(self):\r\n return self._best_value_step" ]
[ "0.7940937", "0.75706524", "0.7514424", "0.7484905", "0.7470057", "0.73012173", "0.7286416", "0.7277636", "0.72470057", "0.713876", "0.7031306", "0.69861174", "0.69578314", "0.69092953", "0.6793182", "0.6793182", "0.6793182", "0.6793182", "0.6725299", "0.6709949", "0.6709949", "0.6709949", "0.67003274", "0.67003274", "0.65547025", "0.6544097", "0.6518503", "0.6514504", "0.64811164", "0.64746", "0.6440938", "0.6438689", "0.64351815", "0.6430772", "0.6426554", "0.64263815", "0.63883436", "0.63873816", "0.6385068", "0.6381802", "0.63800013", "0.63669235", "0.6358107", "0.63183516", "0.63171154", "0.6310506", "0.6289351", "0.6282998", "0.6271136", "0.6268105", "0.6266258", "0.6265388", "0.6262067", "0.6260861", "0.6230867", "0.62290543", "0.62117815", "0.6210761", "0.6187509", "0.61729515", "0.6154421", "0.6147444", "0.6147444", "0.6143075", "0.6142342", "0.6140628", "0.613798", "0.6129236", "0.6114425", "0.61115736", "0.6100858", "0.60984683", "0.6097571", "0.60882485", "0.6069015", "0.6060969", "0.6054641", "0.6052374", "0.604492", "0.6035663", "0.6027126", "0.6027052", "0.6013548", "0.60124785", "0.600892", "0.60009176", "0.5998138", "0.5997094", "0.59914947", "0.5986158", "0.59843737", "0.5976863", "0.5974366", "0.59733564", "0.59730947", "0.5971303", "0.5969903", "0.59657323", "0.59615177", "0.5959662", "0.5958228" ]
0.0
-1
Should get and set visibility through the internal _step object.
def test_step_visibility(self, _step: PropertyMock):
    _step.return_value = MagicMock(is_visible=True)
    es = exposed.ExposedStep()
    self.assertTrue(es.visible)
    es.visible = False
    self.assertFalse(es.visible)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getSteps():", "def _step(self) -> None:", "def _get_steps(self):\n return self.steps", "def get_steps(self):\n return self.steps", "def step_values(self):\n return self._get_values().copy()", "def _step(self):\n pass", "def step ( self ) :\n return self.__step", "def step(self):\n return self._step", "def getCurrentStep():", "def value(self, step):\n raise NotImplementedError", "def step(self):\r\n raise NotImplementedError", "def get_steps(self):\n return self.steps", "def step(self):\n raise NotImplementedError", "def do_step(self) -> None:", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n\n pass", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def get_step(self):\n return self.step", "def get_step(self):\n return self.step", "def step_env(self):\n raise NotImplementedError\n # Not needed for this homework", "def getStep():\n # TODO: can there be non-Step logs?", "def step_forward(self):", "def getStep(self, *args):\n return _CompuCell.Simulator_getStep(self, *args)", "def _step(self, a):\n obs, rew, done, info = super()._step(a)\n # if self.robot.body_xyz[0] > self.threshold:\n # rew = 1.0\n # self.threshold += 1\n # else:\n # rew = 0.0\n # self.steps += 1\n # if self.steps > self.max_episode_steps:\n # done = True\n return obs, rew, done, info", "def step(self, state):", "def step(self):\n value = self.current_event[\"step\"][\"value\"]\n self.current_value.append(value)", "def step(self, step, observation, **extra_feed):\n extra_feed['act_step'] = step\n a, v, state, neglogp = self._evaluate([self.act_action, self.vf, self.state, self.act_neglogp], observation, **extra_feed)\n if state.size == 0:\n state = None\n return a, v, state, neglogp", "def train(self, steps):\r\n for e in range(steps):\r\n # do something...\r\n pass\r\n return self.get_value_function()", "def GetTimestepValues(self):\n if self.__timesteps is None: self.__timesteps = self.__SetInputTimesteps()\n # self.__timesteps should already be of type list\n return self.__timesteps if self.__timesteps is not None else None", "def step(self,\n actn: int) -> Tuple[np.array, float, bool]:\n raise NotImplementedError", "def raw_steps(self):\n return self.obj_payload[\"steps\"]", "def getSteps( self ):\n\n return self.adb.get( 'steps' )", "def _step(self, a):\n obs, rew, done, info = super()._step(a)\n # rew = +1 if past int threshold for first time in episode\n # if self.robot.body_xyz[0] > self.threshold:\n # self.threshold += 1\n # rew = 1.0\n # else:\n # rew = 0.0\n # self.steps += 1\n # if self.steps > self.max_episode_steps:\n # done = True\n return obs, rew, done, info", "def __call__(self, new_val, previous_val, step):\n\t\treturn", "def total_steps(self) -> global___Expression:", "def _step(self, whence):\n pass", "def step(self,inp): ## function responsible for exciting the machine with a SINGLE INPUT VALUE\n (s, o) = self.getNextValues(self.state,inp)\n # will store the state and return the output\n self.state =s\n return o", "def step(self, **kwargs):\n pass", "def GetStepTaken(self):\n return _gmat_py.Propagator_GetStepTaken(self)", "def step(self, observation):\n action, value = self(observation)\n\n return {\n 'actions': action,\n 'values': value\n }", "def get_time_step_values(self):\n return DiscretizeMeshReader.get_time_step_values(self)", "def get_step(self):\n # decide which 
direction and how far\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def step(self, step=None):\n pass", "def _step(self) -> int:\n return self._config[CONF_STEP]", "def step(\n self, actions: ActionDict\n ) -> tuple[\n ObsDict, dict[str, float], dict[str, bool], dict[str, bool], dict[str, dict]\n ]:\n raise NotImplementedError", "def _setVals(self, step=0):\n self.step = step", "def get_time_step_values(self):\n if self.need_to_read():\n self._read_up_front()\n return self._timesteps if self._timesteps is not None else None", "def state_step(self) -> float:\n raise NotImplementedError", "def _prey_step(self):\n raise NotImplementedError()", "def step(self):\n rtn = self.loc\n for i in range(0,self.space):\n rtn = self._step(rtn)\n\n self.steps += 1\n self.loc = np.copy(rtn) # necessary?\n\n if self.record_steps:\n self.history = np.concatenate((self.history, [rtn]), axis=0)\n\n assert(self.history.shape == (self.steps, self.dim))\n\n return rtn", "def previous_step_result(self):\n return self._previous_step_result", "def step(self):\n return _uhd_swig.meta_range_t_step(self)", "def get_step(self) -> int:\n return self.step", "def cur_step(self):\n return self._cur_step", "def get_time_step_values(self):\n return OcTreeReader.get_time_step_values(self)", "def get_time_step_values(self):\n return GravGradReader.get_time_step_values(self)", "def get_step(self):\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def get_step(self):\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def get_time_step_values(self):\n return TensorMeshReader.get_time_step_values(self)", "def get_time_step_values(self):\n return GravObsReader.get_time_step_values(self)", "def get_steps_num():\n return 0", "def value_steps(self, steps):\n return self.initial_value * self.schedule(steps / self.nvalues)", "def step_points(self) -> np.array:\n if self._data is None:\n return np.array([])\n return self._data.index.values", "def do_steps(self):\n steps = self.get_step_conf()\n all_step_config = dict()\n for k, v in steps.items():\n tmp_list = list()\n all_step_config[k] = tmp_list\n start = v[\"Start Value\"]\n end = v[\"End Value\"]\n # special handling of edge length\n if(k == \"Edge Length\"):\n start = self.convert_to_tuple(start)\n end = self.convert_to_tuple(end)\n tmp_list.append(str(start))\n while(start != end):\n start = self.add_edge_length(\n start, self.convert_to_tuple(v[\"Step\"]))\n tmp_list.append(str(start))\n print start\n else:\n tmp_list.append(float(start))\n while float(start) < float(end):\n start = float(start) + float(v[\"Step\"])\n tmp_list.append(start)\n return all_step_config", "def eval_step(self, *args, **kwargs):\n raise NotImplementedError", "def get_time_step_values(self):\n return TopoReader.get_time_step_values(self)", "def steps(self):\n for step in self._steps:\n yield step", "def step(self, observation: dict) -> dict:\n raise NotImplementedError(\"step\")", "def step(self) -> int:\n return self._step", "def step(self, action: np.ndarray) -> Tuple[np.ndarray, np.float64, bool, dict]:\n next_state, reward, done, info = self.env.step(action)\n return next_state, reward, done, info", "def Step(self, *args):\n return _gmat_py.Propagator_Step(self, *args)", "def _step(self, a):\n state, rew, done, info = super()._step(a)\n render = self.get_render_obs()\n return render, sum(self.rewards), 
bool(done), {}", "def _e_step(self, x):\n return self.get_posterior(x)", "def _get_next_point(self):\n #Get the index of the current step in each dimension\n nparams = len(self.transform.get_params())\n indices = [0]*nparams\n #Get the number of steps in each dimension\n lengths = [len(self.steps[i]) for i in range(nparams)]\n\n end = False\n while not end:\n yield [self.steps[i][indices[i]] for i in range(nparams)]\n\n #Increment the index of the last paramenter and then check whether it goes over the end\n indices[-1] += 1\n for p in reversed(range(nparams)):\n if indices[p] == lengths[p]:\n indices[p] = 0\n if p > 0:\n indices[p-1] += 1\n else:\n end = True", "def step(self, inp):\n nState, output = self.getNextValues(self.currState, inp)\n self.currState = nState\n return output", "def take_step(self):\n choices_of_steps = [(0,1), (1,0), (0,-1), (-1,0)]\n return random.choices(choices_of_steps)[0]", "def step(self, move):", "def _sample_steps(self):\n mixture_size = self.parameters['fixed_mixture_size']\n if not self.is_correlated_mixture and mixture_size is None:\n return self.get_steps('monte_carlo')\n else:\n return self.get_steps('metropolis')", "def step(self, action):\n (self.state, self.reward, self.terminal, self.truncated,\n self.info) = self.env.step(action)\n\n return self.state, self.reward, self.terminal, self.truncated, self.info", "def get_time_step_values(self):\n return MagObsReader.get_time_step_values(self)", "def perform_step(self) -> None:\n pass", "def get_step(self):\n direction = choice([1,-1])\n direction = choice([0, 1, 2, 3, 4])\n step = direction * distance\n return step", "def steps(self) -> pulumi.Output[Sequence['outputs.StepResponse']]:\n return pulumi.get(self, \"steps\")", "def process_step(self, observation, reward, done, info):\n observation = self.process_observation(observation)\n reward = self.process_reward(reward)\n info = self.process_info(info)\n return observation, reward, done, info", "def __getitem__( self, stepNum ):\n assert isinstance( stepNum, int )\n\n assert isinstance( self._env, Env )\n assert isinstance( self._steps, list )\n\n return self._steps[ stepNum - 1 ]", "def step(self):\n if self.dynamic:\n self._update_db_obj()\n return self._db_obj.step", "def _get_step_price(self):\n return self._step_counter_factory.get_step_price()", "def get_time_step_values(self):\n return TensorMeshAppender.get_time_step_values(self)", "def horde_step(self, observation):", "def step(\n self,\n actions,\n ) -> Tuple[\"next_state\", \"reward\", \"done\", \"env_info\"]:\n env_info = self.env.step(actions)[self.brain_name]\n next_states = env_info.vector_observations\n rewards = env_info.rewards\n dones = env_info.local_done\n return (next_states, rewards, dones, env_info)", "def _step(self, action: types.NestedArray) -> ts.TimeStep:", "def step(self, action):\n obs, r, done, info = self.env.step(action)\n obs = self.get_observation(obs)\n return obs, r, self.is_done(), info", "def get_view_steps(self):\n return self._data_dict[self.KEY_VIEW_STEPS]", "def record(self, step):", "def expansion_steps(self):\n return self._p", "def best_step(self):\r\n return self._best_value_step" ]
[ "0.7940937", "0.75706524", "0.7514424", "0.7484905", "0.7470057", "0.73012173", "0.7286416", "0.7277636", "0.72470057", "0.713876", "0.7031306", "0.69861174", "0.69578314", "0.69092953", "0.6793182", "0.6793182", "0.6793182", "0.6793182", "0.6725299", "0.6709949", "0.6709949", "0.6709949", "0.67003274", "0.67003274", "0.65547025", "0.6544097", "0.6518503", "0.6514504", "0.64811164", "0.64746", "0.6440938", "0.6438689", "0.64351815", "0.6430772", "0.6426554", "0.64263815", "0.63883436", "0.63873816", "0.6385068", "0.6381802", "0.63800013", "0.63669235", "0.6358107", "0.63183516", "0.63171154", "0.6310506", "0.6289351", "0.6282998", "0.6271136", "0.6268105", "0.6266258", "0.6265388", "0.6262067", "0.6260861", "0.6230867", "0.62290543", "0.62117815", "0.6210761", "0.6187509", "0.61729515", "0.6154421", "0.6147444", "0.6147444", "0.6143075", "0.6142342", "0.6140628", "0.613798", "0.6129236", "0.6114425", "0.61115736", "0.6100858", "0.60984683", "0.6097571", "0.60882485", "0.6069015", "0.6060969", "0.6054641", "0.6052374", "0.604492", "0.6035663", "0.6027126", "0.6027052", "0.6013548", "0.60124785", "0.600892", "0.60009176", "0.5998138", "0.5997094", "0.59914947", "0.5986158", "0.59843737", "0.5976863", "0.5974366", "0.59733564", "0.59730947", "0.5971303", "0.5969903", "0.59657323", "0.59615177", "0.5959662", "0.5958228" ]
0.0
-1
Should abort stopping and not raise an error when no internal step is available to stop.
def test_step_stop_aborted(self, _step: PropertyMock):
    _step.return_value = None
    es = exposed.ExposedStep()
    es.stop()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _gracefully_stop(self):\n pass", "def halt(*_, **kwargs):\n raise ExecutionFinished(\"Reached halt\")", "def stop() -> None:", "def abort() -> NoReturn:\n raise AbortSignal", "def stop(self) -> None:\n ...", "def stop(self) -> None:", "def stop(self) -> None:", "def abort(self):\n try:\n self.acqRunning = False\n except:\n print('Cannot abort properly')", "def abort(self):\n raise NotImplementedError", "def _stop(self):", "def aborting(self):\n \n pass", "def __exit__(self, exc_type, exc_val, exc_tb) -> None:\n self.stop()", "def need_stop(self, path):", "def abort(self):\n print(\"abort\")", "def _prepare_to_stop(self):\n pass", "def stop(self):\r\n self.terminating = True", "def do_abort(self):\n self.abort = True\n if self.monitor: self.monitor.stop( )", "def stop():", "def stop():", "def stop():", "def stop():", "def stop(self):\n self.halt = True", "def test_stop_step_no_halt(self):\n support.create_project(self, 'homer2')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.shared.other = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop()',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.other = 1'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertEqual(project.shared.fetch('other'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def stop(self) -> None:\n pass", "def stop(self) -> None:\n raise NotImplementedError()", "def stop(self) -> None:\n raise NotImplementedError()", "def stop(self):\n raise NotImplementedError", "def stop(self):\n raise NotImplementedError", "def stop(self):\n raise NotImplementedError()", "def stop(self):\n raise NotImplementedError()", "def stop(self, message):\r\n raise StopTestException(message)", "def foreceStop(self):\n self.__success = False\n self.stop()", "def stop_check(self):\n pass", "def Stop(self, *_):\n self.Log('Stopping...')\n self._stop = True", "def Stop(self):\n raise NotImplementedError", "def stop_procedure(self):\n pass", "def stop(self) -> None:\n raise NotImplementedError(\"Base method not implemented\")", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def stop(self):\r\n raise NotImplementedError('method stop() is not implemented')", "def abort(self, message: str) -> None:\n message = f\"{Invocation.current.log} - {message}\"\n self.exception = StepException(message)\n global failure_aborts_build # pylint: disable=invalid-name\n global no_actions # pylint: disable=invalid-name\n if failure_aborts_build.value and not no_actions.value:\n no_additional_complaints()\n raise self.exception", "def _stop(self) -> None:\n self._stopped.set()", "def stop(self):\r\n pass", "def stop(self):\n self._stop_flag = True", "def stop (self):\n pass", "def stop (self):\n pass", "def _stop(self):\n return True", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def test_stop_step_and_halt(self):\n support.create_project(self, 'homer')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop(halt=True)',\n 'cd.shared.test 
= 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 3'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def abort(self):\r\n LOG(\"Aborting execution\")\r\n self.controller.abort()", "def stop(self):\n raise TaskError(\"Task %s: subclass should override stop() method!\" % (\n self))", "def __exit__(self, *args):\n self.stop()", "def stop(self):\n\t\tpass", "def stop(self):\n self._stop_event.set()", "def _maybe_stop_iteration(self, global_step, batch_count):\n if batch_count == self.STOP_BATCH_COUNT_PER_EPOCH:\n warnings.warn(\n \"The memory benchmark runner performs only \"\n + f\"{self.STOP_BATCH_COUNT_PER_EPOCH} steps per epoch.\"\n )\n raise StopIteration", "def stopTestRun(self):", "def stop(self):\n pub = rospy.Publisher('robot/set_super_stop', Empty, queue_size=10)\n baxter_dataflow.wait_for(\n test=lambda: self._state.stopped == True,\n timeout=3.0,\n timeout_msg=\"Failed to stop the robot\",\n body=pub.publish,\n )", "def stop(self) :\n raise NotImplementedError(\"stop not implemented\")", "def stop(self):\n self.exit.set()", "def stop(self):\n self.exit.set()", "def stop(self):\n self._should_run = False", "def stop(self):\n return", "def eStop(self):\n Step(speed=0, coils=1, steps=0, dir=Step.BRAKE)\n # #######################################################\n # Need to blink Stop and wait until Stop is pressed again\n # #######################################################", "def stop(self):", "def stop(self):", "async def _stop(self):\n return", "def __exit__(self, type=None, value=None, traceback=None):\n self.stop()", "def stop(self):\n if not self._thread or self._abort:\n return\n\n self._abort = True\n self._thread.join()", "def stop(self):\n self.requested_state = 'Stopped'\n self.ml_interface.stop()", "def stop(self):\n self.stopping = True\n self.queue_response(exc=ClientError())", "def testTrainingStop(self):\n # The `train()` function raises a RuntimeError\n with self.assertRaises(RuntimeError):\n train(\n self.params,\n RayDMatrix(self.x, self.y),\n callbacks=[_kill_callback(self.die_lock_file)],\n num_boost_round=20,\n ray_params=RayParams(max_actor_restarts=0, num_actors=2))", "def post_stop(self):", "def stop(self):\n return self.setup.stop", "def force_stop(self):\n #cancel any current request:\n self._cancel_current_request()", "async def stop(self) -> None:\n with STOP_BLUEPRINT.as_task(name=self.name):\n self._change_blueprint_state(BlueprintState.TERMINATING)\n\n try:\n for steps in reversed(self.execution_order):\n stop_steps = [step for step in steps if hasattr(step, \"stop\")]\n if stop_steps:\n NEXT_BOOTSTEPS.log(name=self.name, next_bootsteps=stop_steps)\n async with trio.open_nursery() as nursery:\n for step in stop_steps:\n _apply_step(nursery, step.stop)\n except Exception as e:\n self._change_blueprint_state((BlueprintState.FAILED, e))\n raise\n else:\n self._change_blueprint_state(BlueprintState.TERMINATED)", "def stopclean(self):\n raise Exception(\"Not implemented\")", "def 
stop(self):\n self.stopped = True", "def stop(self):\n self._run = False", "def abort(self, extra=None):\n self.logger.info(f\"Aborting {self.client_name} during {self.phase} phase.\")\n if isinstance(self.trainer, Trainer):\n self.logger.info(f\"Aborting {self.client_name} trainer...\")\n self.trainer.interrupt()\n if isinstance(self.evaluator, Trainer):\n self.logger.info(f\"Aborting {self.client_name} evaluator...\")\n self.evaluator.interrupt()", "def stop(self):\n self._run = False\n self.IA.stop()", "def abort(self, wait=False):\n if self.running:\n self.stopping = True\n if wait:\n assert self.greenlet is not greenlet.getcurrent(), \"Can't abort with wait from inside the hub's greenlet.\"\n # schedule an immediate timer just so the hub doesn't sleep\n self.schedule_call_global(0, lambda: None)\n # switch to it; when done the hub will switch back to its parent,\n # the main greenlet\n self.switch()", "def stop_run(arn=None):\n pass", "def stop(self):\n if self.debug:\n print(\"%s stop\" % self.name)\n self.force_exit()", "def check_stop(self, check_messages=True):\n if check_messages:\n self.check_messages()\n if self.stop_request:\n self.stop_request=False\n raise ScriptStopException()", "def _thread_abort_breakout_point(self):\n self._require_controller_modes('thread_initialized')\n self.thread.abort_breakout_point()" ]
[ "0.7060672", "0.6969455", "0.6947691", "0.6831138", "0.6816764", "0.6798415", "0.6798415", "0.6757007", "0.6750949", "0.6735412", "0.6730144", "0.66933066", "0.66833615", "0.6682303", "0.6665194", "0.6663481", "0.66407984", "0.6631349", "0.6631349", "0.6631349", "0.6631349", "0.66267425", "0.66251135", "0.65959656", "0.65954393", "0.65954393", "0.65833724", "0.65833724", "0.6577739", "0.6577739", "0.65518576", "0.6545857", "0.65332425", "0.6515144", "0.6511379", "0.65076125", "0.65035325", "0.64906037", "0.6486791", "0.64822644", "0.6477269", "0.64669085", "0.645775", "0.64533144", "0.64533144", "0.64413035", "0.643773", "0.643773", "0.643773", "0.643773", "0.643773", "0.643773", "0.643773", "0.643773", "0.643773", "0.643773", "0.643773", "0.6426402", "0.64249194", "0.64249194", "0.64249194", "0.64249194", "0.64249194", "0.64249194", "0.64143294", "0.64073014", "0.6399954", "0.6394778", "0.63920665", "0.639142", "0.6387223", "0.6385548", "0.6358083", "0.63540465", "0.63540465", "0.6345894", "0.63451105", "0.6337376", "0.6336644", "0.6336644", "0.63274276", "0.63133204", "0.63014966", "0.6296516", "0.6283434", "0.62758535", "0.627347", "0.62695587", "0.6266083", "0.6249086", "0.6231326", "0.62235934", "0.6222427", "0.6220061", "0.6208383", "0.62072426", "0.6202457", "0.619911", "0.61970055", "0.61928916" ]
0.7588589
0
Should abort stopping and not raise an error when no internal project is available to stop.
def test_project_stop_aborted(self, get_internal_project: MagicMock):
    get_internal_project.return_value = None
    ep = exposed.ExposedProject()
    ep.stop()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stopBuild(reason=\"<no reason given>\"):", "def _gracefully_stop(self):\n pass", "def abort(self):\n try:\n self.acqRunning = False\n except:\n print('Cannot abort properly')", "def stop() -> None:", "def test_stop_project(self):\n support.create_project(self, 'homer3')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.project.stop()',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 3'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def _stop(self):", "def stop(self) -> None:\n ...", "def stop():", "def stop():", "def stop():", "def stop():", "def need_stop(self, path):", "def stop(self) -> None:", "def stop(self) -> None:", "def stopclean(self):\n raise Exception(\"Not implemented\")", "def _prepare_to_stop(self):\n pass", "def shutdown(self):\n rospy.loginfo(\"Stopping Project\")\n rospy.sleep(1)", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def do_abort(self):\n self.abort = True\n if self.monitor: self.monitor.stop( )", "def exit_engine(self):\n self.stop_flag = True", "def aborting(self):\n \n pass", "def terminate(self):\n self._running = False", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def abort(self):\n print(\"abort\")", "def stop_run(arn=None):\n pass", "def stop(self) -> None:\n pass", "def stop_all():\n\twhile _running:\n\t\t_running[0].stop(noerror=True)", "def stop (self):\n pass", "def stop (self):\n pass", "def _stop(self):\n return True", "def stopTestRun(self):", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\r\n pass", "def __exit__(self, exc_type, exc_val, exc_tb) -> None:\n self.stop()", "def stop(self):\n\t\tpass", "def stop(self):\n self._should_run = False", "def stop(self):\n # Cleanup platform first.\n self.cleanup()\n\n if self.init_lhost:\n self._lhost.stop()\n\n self.status = False # pylint: disable=attribute-defined-outside-init", "def force_stop(self):\n #cancel any current request:\n self._cancel_current_request()", "def foreceStop(self):\n self.__success = False\n self.stop()", "def Stop(self, *_):\n self.Log('Stopping...')\n self._stop = True", "def stop(self):\n self.stopped = True", "def abort(self):\n raise NotImplementedError", "def stop(self):\n self._run = False", "async def _stop(self):\n return", "def stop(self):\n raise NotImplementedError()", "def stop(self):\n raise NotImplementedError()", "def stop(self):\n raise NotImplementedError", "def stop(self):\n raise NotImplementedError", "def exit(self, *args):\n self.stop('all')\n sys.exit(1)", "def stop(self):", "def stop(self):", "def gracefully_terminate(self):\n self.running = False", "def stop_check(self):\n pass", "def stop(self):\n if self.thread_state is True:\n 
os.system('qq stop')\n self.thread_state = False", "def stop(self):\n self._stop_flag = True", "def platform_stop(self):\n self.platform.stop()", "def abort() -> NoReturn:\n raise AbortSignal", "def stop(self):\r\n self.running = False", "def stop(self):\r\n self.running = False", "def stop(self):\n if self.debug:\n print(\"%s stop\" % self.name)\n self.force_exit()", "def stop(self) -> None:\n raise NotImplementedError()", "def stop(self) -> None:\n raise NotImplementedError()", "def abort(self):\r\n LOG(\"Aborting execution\")\r\n self.controller.abort()", "def stop(self):\n return self.setup.stop", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def __exit__(self, *args):\n self.stop()", "def InterfaceClientStop(self, exitCode=200): \n pass", "def stop(self):\n self.stopping = True\n self.queue_response(exc=ClientError())", "def __exit__(self):\n self._stop_all()", "def stop(self):\n self.api.stop()", "def stop(self):\n self._run = False\n self.IA.stop()", "def stop(self):\n self.halt = True", "def stop(self):\r\n self.terminating = True", "def _stop(self):\n self._pi.stop()", "def stop(self):\r\n raise NotImplementedError('method stop() is not implemented')", "def stop(self):\n for worker in self.workers:\n import sys; sys.stdout.flush()\n try: worker.exec_code('import sys;sys.exit(0)')\n except:\n #should really do something here to\n # trap non-SystemExit errors.\n pass", "def stop(self, message):\r\n raise StopTestException(message)", "def abort_requested():\n if KODI_VERSION_MAJOR > 13:\n return MONITOR.abortRequested()\n\n return xbmc.abortRequested", "def stop(self):\n return", "def Stop(self):\n raise NotImplementedError", "def stop(self):\n self.exit.set()" ]
[ "0.7033246", "0.6999605", "0.68412125", "0.6814739", "0.66516757", "0.6600153", "0.6580697", "0.6565686", "0.6565686", "0.6565686", "0.6565686", "0.65105325", "0.6509005", "0.6509005", "0.6490828", "0.64628285", "0.6435273", "0.6418074", "0.640065", "0.6397539", "0.63974357", "0.6396683", "0.63787335", "0.63787335", "0.63787335", "0.63787335", "0.63787335", "0.63787335", "0.6371794", "0.6360139", "0.6328897", "0.6324974", "0.63207835", "0.63207835", "0.6311347", "0.6307883", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.6304358", "0.62936705", "0.62883496", "0.6283596", "0.6274921", "0.62671477", "0.62600565", "0.6238501", "0.6237981", "0.62277204", "0.62267095", "0.6221113", "0.6220826", "0.6218487", "0.6218487", "0.62109625", "0.62109625", "0.61983454", "0.61971015", "0.61971015", "0.6191309", "0.6185721", "0.6180469", "0.6180262", "0.6179173", "0.6176591", "0.6160637", "0.6160637", "0.615976", "0.61552006", "0.61552006", "0.61512005", "0.61489415", "0.61476886", "0.61476886", "0.61476886", "0.61476886", "0.61476886", "0.61458755", "0.6140622", "0.61384994", "0.6128896", "0.61260575", "0.6119242", "0.61169887", "0.6112904", "0.6104876", "0.61007774", "0.6092484", "0.6085634", "0.6084411", "0.60830617", "0.6079674", "0.6054326" ]
0.7577744
0
Title should change through exposed project.
def test_change_title(self):
    test_title = 'Some Title'
    support.create_project(self, 'igor')
    cd.project.title = test_title
    self.assertEqual(cd.project.title, test_title)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def title(self) -> str:\n pass", "def get_title():", "def set_title(self, title):\n\t\tpass", "def title(self) -> str:\n raise NotImplementedError", "def title(self, title):\n\n self.container['title'] = title", "def title(self) -> String:\n pass", "def getTitle(self): #$NON-NLS-1$\r", "def getTitle(self): #$NON-NLS-1$\r", "def set_title(self, title):\r\n self.title = title", "def get_title(self) -> str:\n pass", "def set_title (self, title):\n self.title = title", "def title(self):\n\n return self._title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self, title):\n\n self._title = title", "def title(self):\n\t\treturn self.page_title", "def Title(self):\n return self.title", "def title(self):\n return self.run_command('title')[0]", "def Show_Titles( self ):\r\n self.system.Change_Seq( \"Title\" )", "def get_title(self):\n return self.title", "def get_title(self):\n return self.title", "def get_title(self):\n return self.title", "def set_title(self, title):\n \n self.name = title or \"\"", "def set_title(self, title):\n self.data['title'] = title", "def title(self, title: str):\n\n self._title = title", "def title(self):\n return self.__title", "def title(self):\n return self.__title", "def title(self):\n return self.__title", "def title(self, title):\n\t\tself.head += '<title>' + title + '</title>\\n'", "def setTitle(self, title):\n self.__title__ = title", "def title_p(self):\n self.run_command('title_p')", "def title(self):\n return self['title']", "def title(self):\n return self.container['title']", "def title(self) -> str:\n return pulumi.get(self, \"title\")", "def title(self) -> str:\n return pulumi.get(self, \"title\")", "def title(self) -> str:\n return pulumi.get(self, \"title\")", "def title(self, value):\n self.definition.title = value", "def set_title(self, title):\n if check_data_exist(title) is True:\n self.title = title.text", "def title(self):\n return self.definition.title", "def set_title(self, title):\n self.title = title\n self.opf.title = title\n self.ncx.title = title", "def title(self, val):\n self.set_property(\"Title\", val)", "def settitle(self, title):\n self.__title = title", "def settitle(self, title):\n self.__title = title", "def setTitle(self, title):\n self._title = title", "def set_title(self, val):\n self._title = val", "def title(self, value: str):\n self._title = value", "def title(self):\n return self._title", "def title(self):\n return self._title", "def title(self):\n return self._title", "def title(self):\n return self._title", "def title(self):\n return self._title", "def title(self):\n return self._title", "def title(self) -> str:\r\n return self._title", "def title(self):\n return self.get(\"title\")", "def __init__(self, title):\n self._title = title", "def _update_title(self, title, tag, lid):\n return title", "def SetTitle(self, title):\n self.title = str(title)", "def getTitle(self):\n return self.__title__", "def 
test_title(self):\n key = api.portal.get_registry_record(\n 'plone.site_title'\n )\n self.assertEqual(u'Briefy CMS', key)", "def set_title(self, title):\n\n self.title = title\n\n self.add_metadata('DC', 'title', self.title)", "def get_title(self):\n\n return self.title", "def get_title(self):\n return self.run_command('get_title')[0]", "def title(self):\n return self.header", "def handle_title(self, tag, attrs):\n self.title = 'present'", "def setTitle(self, title):\n self.context.setTitle(title, self.getLanguage())", "def get_title(self, obj):\n title = obj.habit.title\n return title", "def title_n(self):\n self.run_command('title_n')", "def short_title(self):\n if hasattr(self, \"title\"):\n return self.title\n else:\n return \"\"", "def title(self) -> str:\n return self._title", "def title(self) -> str:\n return self._title", "def title(self) -> str:\n return self._title", "def title(self) -> str:\n return self._title", "def title(self) -> str:\n return self._title", "def title(self) -> str:\n return self._title", "def title(self) -> str:\n return self._title", "def title(self):\n if self._title is None:\n if Path(self.rst_path).exists():\n self._title = self.get_title_from_rst()\n elif Path(self.ipynb_path).exists():\n self._title = self.get_title_from_ipynb()\n else:\n pass\n return self._title", "def get_title(self):\n return self._title", "def get_title(self):\n return self._title", "def get_title(self):\n return self._title", "def title(self):\n return 'Fale Conosco'", "def title(self):\n return self.metadata.get('title')", "def title(self):\n return self.get(self._names[\"title\"])", "def title(self):\n return self.get(self._names[\"title\"])", "def title(self) -> Title:\n return self._title", "def html_title(self, title=None):\r\n if title is None:\r\n return \"<title>PyBossa</title>\"\r\n else:\r\n return \"<title>PyBossa &middot; %s</title>\" % title", "def set_title(self, setto):\n command = 'title ' + str(setto)\n self.run_command(command)", "def configured_title(self):\n return self.get('title', self.DEFAULT_SPACE_TITLE)", "def settitle(self, title):\n self.__title = title\n self.__nonzero = True", "def setTitle(self,value):\n self.PDFreactorConfiguration.in1[\"title\"] = value" ]
[ "0.79620063", "0.79186386", "0.7862367", "0.78303343", "0.77898055", "0.77829283", "0.76941615", "0.76941615", "0.7663939", "0.7562071", "0.7539742", "0.7495875", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7455016", "0.7448769", "0.7437641", "0.74290353", "0.7426538", "0.74239075", "0.74239075", "0.74239075", "0.74006385", "0.73956335", "0.73766255", "0.73679465", "0.73679465", "0.73679465", "0.7350605", "0.7344418", "0.7334086", "0.73233867", "0.7303843", "0.72852826", "0.72852826", "0.72852826", "0.7278535", "0.72548187", "0.7223276", "0.72151744", "0.720716", "0.7195988", "0.7195988", "0.7193164", "0.7183665", "0.7171014", "0.7170317", "0.7170317", "0.7170317", "0.7170317", "0.7170317", "0.7170317", "0.71538043", "0.7148151", "0.7143895", "0.71437067", "0.713196", "0.7123574", "0.7123525", "0.7121708", "0.71180964", "0.71174544", "0.7096247", "0.7086618", "0.7061601", "0.70316553", "0.7025604", "0.702535", "0.7004424", "0.7004424", "0.7004424", "0.7004424", "0.7004424", "0.7004424", "0.7004424", "0.69964254", "0.6996276", "0.6996276", "0.6996276", "0.6993256", "0.69842637", "0.69793874", "0.69793874", "0.69723684", "0.69664913", "0.6966415", "0.69486654", "0.6946998", "0.6940956" ]
0.7491403
12
Exposed step should apply defaults without project.
def test_no_step_defaults(self):
    es = exposed.ExposedStep()
    self.assertIsNone(es._step)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_Defaults(self):\n self._run(self._test_scenarios, \"Defaults\")", "def setup_default_arguments(self):\n self.add_argument('--clean', action='store_true',\n help='Cleans all generated files.')", "def set_defaults(self):\n self.plastic = False\n self.unset_output()\n self.reward = False\n self.patmod = config.impact_modulation_default", "def set_defaults(context: CreateCommandsContext):\n job_default_parameters: List[\n Parameter\n ] = context.settings.job_default_parameters\n logger.info(\n \"Please set default rows current value shown in [brackets]. Pressing enter\"\n \" without input will keep current value\"\n )\n try:\n project_name = click.prompt(\n \"Please enter default IDIS project name:\",\n show_default=True,\n default=job_default_parameters.project_name,\n )\n\n destination_path = click.prompt(\n \"Please enter default job destination directory:\",\n show_default=True,\n default=job_default_parameters.destination_path,\n )\n except Abort:\n logger.info(\"Cancelled\")\n\n job_default_parameters.project_name = project_name\n job_default_parameters.destination_path = destination_path\n context.settings.save_to()\n logger.info(\"Saved\")", "def without_defaults(self):\n ...", "def test_set_project_default_power_schedule(self):\n pass", "def default():\n raise NotImplementedError(\"Pvwattsv7 default file no longer exists!\")", "def defaults(self):\n self.lib.iperf_defaults(self._test)", "def _default(self):\n self.app.args.print_help()", "def make_default_config(project):\n return {\n \"breathe_projects\": {\n project: \"./_doxygen/xml\"\n },\n \"breathe_default_project\": project,\n \"exhale_args\": {\n # required arguments\n \"containmentFolder\": \"./api\",\n \"rootFileName\": \"{0}_root.rst\".format(project),\n \"rootFileTitle\": \"``{0}`` Test Project\".format(project),\n \"doxygenStripFromPath\": \"..\",\n # additional arguments\n \"exhaleExecutesDoxygen\": True,\n \"exhaleDoxygenStdin\": \"INPUT = ../include\"\n }\n }", "def _post_processing(\n kwargs, skip_translate, invalid\n): # pylint: disable=unused-argument\n # If any defaults were not expicitly passed, add them\n for item in DEFAULTS:\n if item not in kwargs:\n kwargs[item] = DEFAULTS[item]", "def test_build_defaults(self, args, expected):\n sch = scheme.Scheme(*args)\n defaults = sch.build_defaults()\n\n assert defaults == expected", "def test_no_project_defaults(self):\n ep = exposed.ExposedProject()\n self.assertIsNone(ep.display)\n self.assertIsNone(ep.shared)\n self.assertIsNone(ep.settings)\n self.assertIsNone(ep.title)\n self.assertIsNone(ep.id)\n self.assertIsNone(ep.path())\n\n with self.assertRaises(RuntimeError):\n ep.title = 'Some Title'", "def test_with_defaults(self, _):\n result = self.run()\n return self._handle_test_result(result)", "def default():", "def defaults():\n global __preset_staging\n \n t = TreeDict('Default_Parameter_Tree', __defaultpresettree__ = True)\n __preset_staging[id(t)] = t\n return t", "def defaults(argv=None):\n default_cfg = {\n \"random_seed\": 42,\n \"repo_age_in_days\": 10,\n \"fake\": Faker,\n \"team_size\": 3,\n \"developer_strategy\": \"random-uniform\",\n \"general_commit_words\": [\"Add\", \"an\", \"empty\", \"change\"],\n \"merge_commit_words\": [\"Introduce\", \"the\", \"feature\"],\n \"max_commits_per_branch\": 10,\n \"repo_dir\": \"repository\",\n \"datetime_format_template\": r\"%Y-%m-%dT%H:%M:%S\",\n \"ticket_id_template\": r\"ACME-%d\",\n \"message_template\": r\"%s %s\",\n }\n mixin_cfg = mixin(argv)\n cfg = {**default_cfg, **mixin_cfg}\n\n if not 
cfg.get(\"repo_dir\"):\n raise ValueError(\"empty repo_dir, no implicit current working dir use\")\n\n cfg = activate_model(cfg)\n cfg = seed_model(cfg)\n\n if not cfg.get(\"developers\"):\n if not cfg.get(\"developer_data\"):\n cfg[\"developer_data\"] = [\n (cfg[\"fake\"].name(), cfg[\"fake\"].email())\n for _ in range(cfg[\"team_size\"])\n ]\n cfg[\"developers\"] = pairs_to_actors(cfg[\"developer_data\"])\n\n if cfg[\"developer_strategy\"] not in DEVELOPER_STRATEGIES:\n raise ValueError(\n \"warning: developer selection strategy expected in {} but found ('{}') instead\".format(\n DEVELOPER_STRATEGIES, cfg[\"developer_strategy\"]\n )\n )\n\n return cfg", "def get_default_opts(project_name, **aux_opts):\n # Merge the default options generated by argparse\n opts = parse_args([project_name])\n # Remove inadvertent double definition of project_name\n aux_opts.pop('project', None)\n opts.update(aux_opts)\n opts.setdefault('package', utils.make_valid_identifier(opts['project']))\n opts.setdefault('author', info.username())\n opts.setdefault('email', info.email())\n opts.setdefault('release_date', date.today().strftime('%Y-%m-%d'))\n opts.setdefault('year', date.today().year)\n opts.setdefault('license', 'none')\n opts.setdefault('description', 'Add a short description here!')\n opts.setdefault('url', 'http://...')\n opts.setdefault('version', pyscaffold.__version__)\n opts.setdefault('title',\n '='*len(opts['project']) + '\\n' + opts['project'] + '\\n' +\n '='*len(opts['project']))\n classifiers = ['Development Status :: 4 - Beta',\n 'Programming Language :: Python']\n opts.setdefault('classifiers', utils.list2str(\n classifiers, indent=4, brackets=False, quotes=False, sep=''))\n opts.setdefault('url', 'http://...')\n # Initialize empty list of all requirements\n opts.setdefault('requirements', list())\n opts['namespace'] = utils.prepare_namespace(opts['namespace'])\n if opts['namespace']:\n opts['root_pkg'] = opts['namespace'][0]\n opts['namespace_pkg'] = \".\".join([opts['namespace'][-1],\n opts['package']])\n else:\n opts['root_pkg'] = opts['package']\n opts['namespace_pkg'] = opts['package']\n if opts['update']:\n if not os.path.exists(project_name):\n raise RuntimeError(\n \"Project {project} does not exist and thus cannot be \"\n \"updated!\".format(project=project_name))\n opts = info.project(opts)\n # Reset project name since the one from setup.cfg might be different\n opts['project'] = project_name\n if opts['django']:\n opts['force'] = True\n opts['package'] = opts['project'] # since this is required by Django\n opts['requirements'].append('django')\n if opts['cookiecutter_template']:\n opts['force'] = True\n return opts", "def setup_defaults(self):\n status = self._lib_vscf_ecc.vscf_ecc_setup_defaults(self.ctx)\n VscfStatus.handle_status(status)", "def test_with_defaults():\n runner = CliRunner()\n result = runner.invoke(main, [\"fix_me\"])\n assert result.exit_code == 0\n assert not result.exception", "def test_set_default_config(qibuild_action, build_worktree):\n qibuild_action(\"add-config\", \"foo\", \"--default\")\n assert build_worktree.default_config == \"foo\"", "def get_defaults():\n\n # get package defaults\n with open(os.path.join(iLoop_RNAseq_pipeline.__path__[0], 'defaults', 'RNAseq_pipeline_defaults.txt')) as rpd:\n defaults = {}\n for line in rpd.readlines():\n if line.strip():\n defaults[line.split(',')[0].strip()] = line.split(',')[1].strip()\n\n try:\n with open(os.path.join(os.path.expanduser(\"~\"), 'RNAseq_pipeline_defaults.txt')) as rpd:\n for line in 
rpd.readlines():\n if line.strip():\n defaults[line.split(',')[0].strip()] = line.split(',')[1].strip()\n except FileNotFoundError:\n logger.warning('\"RNAseq_pipeline_defaults.txt\" does not exist under home path. An email address and project ID should be should be define in that file.')\n\n # replace with user defaults\n try:\n with open('RNAseq_pipeline_defaults.txt') as rpd:\n for line in rpd.readlines():\n if line.strip():\n defaults[line.split(',')[0].strip()] = line.split(',')[1].strip()\n except FileNotFoundError:\n logger.info(\n '\"RNAseq_pipeline_defaults.txt\" does not exist under this folder. Defaults from the package and home path will be used.')\n\n if 'email' not in defaults:\n if not validate_email(defaults['email']):\n while True:\n email = input('Enter a valid email address for job status: \\n')\n if validate_email(email):\n defaults['email'] = email\n print('Writing email to \"RNAseq_pipeline_defaults.txt\" under home path.')\n f = open(os.path.join(os.path.expanduser(\"~\"), 'RNAseq_pipeline_defaults.txt'), 'w+')\n f.write('\\nemail,{}'.format(email))\n f.close()\n break\n else:\n print('{} is not valid, try again.'.format(email))\n\n if ('project' not in defaults) or (defaults['project'] == 'projectid'):\n project = input('Enter Computerome project ID for billing: \\n')\n # TODO It is possible to validate this by checking folder name under \"/home/projects\".\n defaults['project'] = project\n print('Writing project ID to \"RNAseq_pipeline_defaults.txt\" under home path.')\n f = open(os.path.join(os.path.expanduser(\"~\"), 'RNAseq_pipeline_defaults.txt'), 'w+')\n f.write('\\nproject,{}'.format(project))\n f.close()\n\n return defaults", "def set_defaults(self):\r\n for name, option in self.options.iteritems():\r\n if not option.is_required():\r\n self.set_value(name, option, option.default)", "def set_defaults(self, **kw):\n group = kw.pop('group', None)\n for o, v in kw.items():\n self.cfg_fixture.set_default(o, v, group=group)", "def project_default(tmp_path):\n from nitpick.constants import NITPICK_STYLE_TOML\n from tests.helpers import ProjectMock, tomlstring\n\n nitpick_style = Path(__file__).parent.parent / NITPICK_STYLE_TOML\n return ProjectMock(tmp_path).pyproject_toml(\n f\"\"\"\n [tool.nitpick]\n style = {tomlstring(nitpick_style)}\n \"\"\"\n )", "def use_defaults(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_defaults\")", "def use_defaults(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_defaults\")", "def set_default_fitscenarios(self, default_dict):\n try:\n self.setup.set_defaults(default_dict)\n return 1\n except:\n return 0", "def default_migration(random, population, args):\r\n return population", "def configure_project():\n pass", "def defaultPreset (self):\n assert False, \"To be implemented by child\"", "def defaults():\n\n #dummy = FieldTemplate.dummy\n\n return None", "def test_applies_to_default(self):\n self.assertTrue(\n self.extension.template_hook_no_applies.applies_to(self.request))\n self.assertTrue(\n self.extension.template_hook_no_applies.applies_to(None))", "def create_default_settings():\n from flaskbb.fixtures.settings import fixture\n create_settings_from_fixture(fixture)", "def getDefault():", "def default_context(project_name: str) -> None:\n return BuilderContext(\n project_name=project_name,\n kube_name=project_name.replace(\"_\", \"-\"),\n project_description=\"Generated by pytest.\",\n ci_type=CIType.none,\n db=DatabaseType.none,\n db_info=DB_INFO[DatabaseType.none],\n 
enable_redis=False,\n enable_migrations=False,\n enable_kube=False,\n enable_routers=True,\n add_dummy=False,\n self_hosted_swagger=False,\n force=True,\n )", "def default_arg(default):\n class DefaultArg(argparse.Action):\n def __call__(self, parser, namespace, value, option_string):\n if value is None:\n setattr(namespace, self.dest, default)\n else:\n setattr(namespace, self.dest, value)\n\n return DefaultArg", "def _initialize_defaults(self):\n for key, value in defaults.items():\n if key not in self.source_params:\n self.source_params[key] = value", "def _initialize_defaults(self):\n for key, value in defaults.items():\n if key not in self.source_params:\n self.source_params[key] = value", "def use_defaults(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"use_defaults\")", "def set_default_parameters(self):\n super().set_default_parameters()\n if not \"replace_existing_files\" in vars(self):\n self.replace_existing_files = False\n if not \"num_files_per_point\" in vars(self):\n self.num_files_per_point = -1\n if not \"input_location_type\" in vars(self):\n self.input_location_type = \"local\"\n if not \"output_location_type\" in vars(self):\n self.output_location_type = \"local\"", "def set_default_parameters(self):\n super().set_default_parameters()", "def _create_defaults(self):\n return DefaultCommandOptionValues(\n min_confidence=3, output_format='vs7')", "def add_default_options(self):\n\n options = getattr(self.parent, \"pyautodoc_set_default_option\", [])\n for option in options:\n self.set_default_option(option)", "def test_defaults_are_kept_if_not_specified_in_args(\n self, junit4_hooks, full_args\n ):\n args = empty_args(master_repo_names=MASTER_REPO_NAMES)\n expected_ignore_tests = [\"some\", \"tests\"]\n expected_hamcrest_path = HAMCREST_PATH\n expected_junit_path = JUNIT_PATH\n expected_rtd = RTD\n expected_disable_security = False\n\n junit4_hooks._ignore_tests = expected_ignore_tests\n junit4_hooks._hamcrest_path = expected_hamcrest_path\n junit4_hooks._junit_path = expected_junit_path\n junit4_hooks._reference_tests_dir = expected_rtd\n junit4_hooks._disable_security = expected_disable_security\n\n junit4_hooks.parse_args(args)\n\n assert junit4_hooks._ignore_tests == expected_ignore_tests\n assert junit4_hooks._hamcrest_path == expected_hamcrest_path\n assert junit4_hooks._junit_path == expected_junit_path\n assert junit4_hooks._reference_tests_dir == expected_rtd\n assert junit4_hooks._disable_security == expected_disable_security", "def add_default_options(self, argprs):\n argprs.add_argument(\"-c\", \"--completed\", dest=\"completed\", default=None,\n metavar=\"FILE\",\n help=\"Specify FILE of completed OB keys\")\n argprs.add_argument(\"--date-start\", dest=\"date_start\", default=None,\n help=\"Define the start of the schedule ('YYYY-MM-DD HH:MM')\")\n argprs.add_argument(\"--date-stop\", dest=\"date_stop\", default=None,\n help=\"Define the end of the schedule ('YYYY-MM-DD HH:MM')\")\n argprs.add_argument(\"--display\", dest=\"display\", metavar=\"HOST:N\",\n help=\"Use X display on HOST:N\")\n argprs.add_argument(\"-g\", \"--geometry\", dest=\"geometry\",\n metavar=\"GEOM\", default=None,\n help=\"X geometry for initial size and placement\")\n argprs.add_argument(\"-i\", \"--input\", dest=\"input_dir\", default=\".\",\n metavar=\"DIRECTORY\",\n help=\"Read input files from DIRECTORY\")\n argprs.add_argument(\"-f\", \"--format\", dest=\"input_fmt\", default=None,\n metavar=\"FILE_FORMAT\",\n help=\"Specify input file format (csv, xls, or 
xlsx)\")\n argprs.add_argument(\"--norestore\", dest=\"norestore\", default=False,\n action=\"store_true\",\n help=\"Don't restore the GUI from a saved layout\")\n ## argprs.add_argument(\"--modules\", dest=\"modules\", metavar=\"NAMES\",\n ## help=\"Specify additional modules to load\")\n argprs.add_argument(\"--numthreads\", dest=\"numthreads\", type=int,\n default=30,\n help=\"Start NUM threads in thread pool\", metavar=\"NUM\")\n argprs.add_argument(\"-o\", \"--output\", dest=\"output_dir\", default=None,\n metavar=\"DIRECTORY\",\n help=\"Write output files to DIRECTORY\")\n argprs.add_argument(\"-s\", \"--site\", dest=\"sitename\", metavar=\"NAME\",\n default='subaru',\n help=\"Observing site NAME\")\n argprs.add_argument(\"-t\", \"--toolkit\", dest=\"toolkit\", metavar=\"NAME\",\n default=None,\n help=\"Prefer GUI toolkit (default: choose one)\")\n argprs.add_argument('--version', action='version',\n version='%(prog)s v{version}'.format(version=__version__),\n help=\"Show the qplan version and exit\")\n log.addlogopts(argprs)", "def test_patch_project(self):\n pass", "def testDefault():\n\n conf = naiveConf.NaiveConf(exampleConfFname)\n oldX = conf.x\n conf.default('x', None)\n conf.default('Z', 5)\n\n assert conf.x == oldX\n assert conf.Z == 5", "def apply_config_defaults():\n\n # don't worry about broken settings, validate_config() will take\n # care of them\n\n if 'pre_action_callbacks' not in nori.cfg:\n nori.cfg['pre_action_callbacks'] = [\n (pre_action_drupal_readonly, [], {})\n ]\n\n if 'post_action_callbacks' not in nori.cfg:\n nori.cfg['post_action_callbacks'] = [\n (post_action_drupal_readonly, [], {}, True)\n ]\n\n if 'source_type' not in nori.cfg:\n nori.cfg['source_type'] = 'generic'\n\n if 'source_query_func' not in nori.cfg:\n if nori.core.cfg['source_type'] == 'generic':\n nori.core.cfg['source_query_func'] = generic_db_query\n elif nori.core.cfg['source_type'] == 'drupal':\n nori.core.cfg['source_query_func'] = drupal_db_query\n\n if 'source_query_defaulter' not in nori.cfg:\n if nori.core.cfg['source_type'] == 'generic':\n nori.core.cfg['source_query_defaulter'] = (\n apply_generic_arg_defaults\n )\n elif nori.core.cfg['source_type'] == 'drupal':\n nori.core.cfg['source_query_defaulter'] = None\n\n if 'source_query_validator' not in nori.cfg:\n if nori.core.cfg['source_type'] == 'generic':\n nori.core.cfg['source_query_validator'] = validate_generic_args\n elif nori.core.cfg['source_type'] == 'drupal':\n nori.core.cfg['source_query_validator'] = validate_drupal_args\n\n if 'source_template_change_callbacks' not in nori.cfg:\n if nori.core.cfg['source_type'] == 'generic':\n nori.core.cfg['source_template_change_callbacks'] = []\n elif nori.core.cfg['source_type'] == 'drupal':\n nori.core.cfg['source_template_change_callbacks'] = [\n (drupal_timestamp_callback, [], {})\n ]\n\n if 'source_global_change_callbacks' not in nori.cfg:\n if nori.core.cfg['source_type'] == 'generic':\n nori.core.cfg['source_global_change_callbacks'] = []\n elif nori.core.cfg['source_type'] == 'drupal':\n nori.core.cfg['source_global_change_callbacks'] = [\n (drupal_cache_callback, [], {})\n ]\n\n if 'dest_type' not in nori.cfg:\n nori.cfg['dest_type'] = 'generic'\n\n if 'dest_query_func' not in nori.cfg:\n if nori.core.cfg['dest_type'] == 'generic':\n nori.core.cfg['dest_query_func'] = generic_db_query\n elif nori.core.cfg['dest_type'] == 'drupal':\n nori.core.cfg['dest_query_func'] = drupal_db_query\n\n if 'dest_query_defaulter' not in nori.cfg:\n if nori.core.cfg['dest_type'] == 
'generic':\n nori.core.cfg['dest_query_defaulter'] = (\n apply_generic_arg_defaults\n )\n elif nori.core.cfg['dest_type'] == 'drupal':\n nori.core.cfg['dest_query_defaulter'] = None\n\n if 'dest_query_validator' not in nori.cfg:\n if nori.core.cfg['dest_type'] == 'generic':\n nori.core.cfg['dest_query_validator'] = validate_generic_args\n elif nori.core.cfg['dest_type'] == 'drupal':\n nori.core.cfg['dest_query_validator'] = validate_drupal_args\n\n if 'dest_template_change_callbacks' not in nori.cfg:\n if nori.core.cfg['dest_type'] == 'generic':\n nori.core.cfg['dest_template_change_callbacks'] = []\n elif nori.core.cfg['dest_type'] == 'drupal':\n nori.core.cfg['dest_template_change_callbacks'] = [\n (drupal_timestamp_callback, [], {})\n ]\n\n if 'dest_global_change_callbacks' not in nori.cfg:\n if nori.core.cfg['dest_type'] == 'generic':\n nori.core.cfg['dest_global_change_callbacks'] = []\n elif nori.core.cfg['dest_type'] == 'drupal':\n nori.core.cfg['dest_global_change_callbacks'] = [\n (drupal_cache_callback, [], {})\n ]\n\n if 'templates' not in nori.core.cfg:\n return\n if not isinstance(nori.core.cfg['templates'],\n nori.core.MAIN_SEQUENCE_TYPES):\n return\n\n for i, template in enumerate(nori.core.cfg['templates']):\n if not isinstance(nori.core.cfg['templates'][i],\n nori.core.MAPPING_TYPES):\n continue\n\n if T_MULTIPLE_KEY not in template:\n nori.core.cfg['templates'][i][T_MULTIPLE_KEY] = False\n\n if T_S_QUERY_ARGS_KEY in template:\n args_t = template[T_S_QUERY_ARGS_KEY]\n defaulter = nori.core.cfg['source_query_defaulter']\n if (isinstance(args_t, tuple) and len(args_t) >= 2 and\n isinstance(args_t[0], nori.core.MAIN_SEQUENCE_TYPES) and\n isinstance(args_t[1], nori.core.MAPPING_TYPES) and\n defaulter and callable(defaulter)):\n defaulter(args_t[0], args_t[1])\n\n if T_TO_D_FUNC_KEY not in template:\n nori.core.cfg['templates'][i][T_TO_D_FUNC_KEY] = None\n\n if T_S_NO_REPL_KEY not in template:\n nori.core.cfg['templates'][i][T_S_NO_REPL_KEY] = False\n\n if T_S_CHANGE_CB_KEY not in template:\n nori.core.cfg['templates'][i][T_S_CHANGE_CB_KEY] = []\n\n if T_D_QUERY_ARGS_KEY in template:\n args_t = template[T_D_QUERY_ARGS_KEY]\n defaulter = nori.core.cfg['dest_query_defaulter']\n if (isinstance(args_t, tuple) and len(args_t) >= 2 and\n isinstance(args_t[0], nori.core.MAIN_SEQUENCE_TYPES) and\n isinstance(args_t[1], nori.core.MAPPING_TYPES) and\n defaulter and callable(defaulter)):\n defaulter(args_t[0], args_t[1])\n\n if T_TO_S_FUNC_KEY not in template:\n nori.core.cfg['templates'][i][T_TO_S_FUNC_KEY] = None\n\n if T_D_NO_REPL_KEY not in template:\n nori.core.cfg['templates'][i][T_D_NO_REPL_KEY] = False\n\n if T_D_CHANGE_CB_KEY not in template:\n nori.core.cfg['templates'][i][T_D_CHANGE_CB_KEY] = []\n\n if T_KEY_MODE_KEY not in template:\n nori.core.cfg['templates'][i][T_KEY_MODE_KEY] = 'all'\n\n if T_KEY_LIST_KEY not in template:\n nori.core.cfg['templates'][i][T_KEY_LIST_KEY] = []", "def _set_default_args(self):\n self._parser.add_argument(\"username\")\n self._parser.add_argument(\"password\")\n self._parser.add_argument(\n \"--start\",\n help=\"Start date for the scraper in iso format, eg: 2017-11-19\",\n type=str,\n default=None,\n )\n self._parser.add_argument(\n \"--end\",\n help=\"End date for the scraper in iso format\",\n type=str,\n default=None,\n )\n self._parser.add_argument(\n \"--skip-delete\",\n help=\"Delete the scraper folder in /tmp after run\",\n action=\"store_true\",\n )", "def help_default_values():\n 
click.echo_via_pager(docgen.generate_default_value_help())", "def default(self):\n raise Error(\"Missing mandatory setting:\", self.name)", "def _inject_defaults(settings, defaults):\n new_settings = {}\n\n if defaults is None:\n return settings\n elif settings is None or len(settings) == 0:\n new_settings = defaults\n else:\n for k, v in settings.items():\n if isinstance(v, dict) or v is None:\n new_settings[k] = Settings._inject_defaults(v, defaults[k])\n else:\n new_settings[k] = settings[k]\n\n for k, v in defaults.items():\n if k not in settings:\n new_settings[k] = defaults[k]\n return new_settings", "def default(self):\n raise NotImplementedError", "def get_default_args(**kw):\n default_args_exp = {\n \"output_file\": \"ml_demo.c\",\n \"function_name\": \"ml_demo\",\n \"precision\": ML_Binary32,\n \"accuracy\": ML_Faithful,\n \"target\": GenericProcessor.get_target_instance()\n }\n default_args_exp.update(kw)\n return DefaultArgTemplate(**default_args_exp)", "def create_default(cls):\n raise NotImplementedError(common.OVERRIDE_MESSAGE)", "def defaults():\n return {}", "def defaults() -> dict:\n pass", "def set_default_subparser(self, default, args=None):\n if not args:\n args = sys.argv[1:]\n if args[0] not in ['-h', '--help', '--version', '-info']:\n if args[0].find('-') != -1:\n msg = \"Defaulting to the 'run' command. Please update the\"\n msg += \" call of MontePython. For more info, see the help\"\n msg += \" string and/or the documentation \"\n warnings.warn(msg)\n args.insert(0, default)\n elif args[0] == '-info':\n msg = \"The info option has been turned into a command. \"\n msg += \"Please substitute '-info' with 'info' when running \"\n msg += \"MontePython\"\n warnings.warn(msg)\n args[0] = 'info'\n return args", "def initDefaultCommand(self):\n pass", "def test_default_argument(self):\n @converters.wrap\n def inner_test(param: int = 5):\n \"\"\"Make sure the default was used.\"\"\"\n self.assertEqual(param, 5)\n inner_test()", "def test_default_context():\n rally = Rally(server=RALLY, user=RALLY_USER, password=RALLY_PSWD, server_ping=False)\n context1 = rally.contextHelper.currentContext()\n workspace = rally.getWorkspace()\n project = rally.getProject()\n context2 = rally.contextHelper.currentContext()\n assert context1 == context2\n assert context1.workspace == DEFAULT_WORKSPACE\n assert workspace.Name == DEFAULT_WORKSPACE\n assert context1.project == DEFAULT_PROJECT\n assert project.Name == DEFAULT_PROJECT\n url = makeResourceUrl(rally, 'Defect')\n #print(url)\n expected_workspace_clause = 'workspace=workspace/%s' % str(workspace.oid)\n assert expected_workspace_clause in url\n expected_project_clause = 'project=project/%s' % str(project.oid)\n assert expected_project_clause in url", "def project_presets(project):\n\n return None", "def test_replace_project(self):\n pass", "def test_defaults_are_overwritten(self, junit4_hooks, full_args):\n junit4_hooks._ignore_tests = \"this isn't even a list\"\n junit4_hooks._hamcrest_path = \"wrong/path\"\n junit4_hooks._junit_path = \"also/wrong/path\"\n junit4_hooks._reference_tests_dir = \"some/cray/dir\"\n junit4_hooks._timeout = 9999\n\n junit4_hooks.parse_args(full_args)\n\n assert junit4_hooks._ignore_tests == IGNORE_TESTS\n assert junit4_hooks._hamcrest_path == HAMCREST_PATH\n assert junit4_hooks._junit_path == JUNIT_PATH\n assert junit4_hooks._reference_tests_dir == RTD\n assert junit4_hooks._timeout == TIMEOUT", "def testDefaults(self, widget):\n assert isinstance(widget.highlight, PythonHighlighter)\n assert 
isinstance(widget.parameter_dict, dict)\n assert isinstance(widget.pd_parameter_dict, dict)\n\n assert len(widget.model) == 6\n assert \"filename\" in widget.model.keys()\n assert \"overwrite\" in widget.model.keys()\n assert \"description\" in widget.model.keys()\n assert \"parameters\" in widget.model.keys()\n assert \"pd_parameters\" in widget.model.keys()\n assert \"text\" in widget.model.keys()", "def post_process(self, **kwargs):\n self.create_ignore()\n click.echo('Create project {} successfully. Enjoy yourself!'.format(self.app_dir))", "def test_exactly_implicit_default_no_args_optional():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n required : false\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.SUCCESS.value\n assert TestCmdLine.test_opt == \"default-value\"", "def show_defaults(context: CreateCommandsContext):\n logger.info(\"Default parameters when creating jobs:\")\n for parameter in context.settings.job_default_parameters:\n logger.info(parameter.describe())", "def defaults(self):\n\n return None", "def defaults(self):\n\n return None", "def apply_defaults(self, db, dest, kvargs, lines):\n table = db.get_table(kvargs['table'])\n default_text = kvargs['default_text']\n table.find_default_from_allowable_range_descriptions(default_text)\n # Log the defaults\n logging.info(\"Defaults for table: {}\".format(table.name))\n for var in table.vars():\n if var.default:\n logging.info(\" {}: {}\".format(var.name,var.default))\n return True", "def test_should_return_none_for_defaults(self):\r\n default_spec = {\r\n 'type': 'defaults',\r\n 'spec_type': 'property',\r\n 'functional': True\r\n }\r\n\r\n assert 'property' not in self.spec_parser._defaults\r\n assert self.spec_parser.parse_statement(default_spec) is None\r\n assert 'property' in self.spec_parser._defaults\r\n assert 'functional' in self.spec_parser._defaults['property']._values", "def addDefaultArgs(parser, defaultLog=\"none\"):\n parser.add_argument(\"--version\", action=\"version\",\n version=f\"Isle {isle.__version__}\")\n parser.add_argument(\"-v\", \"--verbose\", action=\"count\", default=0,\n help=\"Make output more verbose, stacks.\")\n parser.add_argument(\"--log\", default=defaultLog,\n help=\"Specify log file name. 
Set to none to not write log file.\")\n return parser", "def default(self, stage=False):\n return self._build_config(state='default', stage=stage)", "def test_build__override_default_values(self) -> None:\n ride: dict = RecurringRideFactory.build()\n default_ride_status: str = ride['ride']['status']\n ride['ride']['status'] = 'In Progress'\n\n assert ride['ride']['status'] != default_ride_status", "def add_earlydefault_settings(self):\n self.add_default_settings_config()\n self.add_default_settings_aliases()", "def apply_metadata_defaults(metadata, component_name):\n if 'TEAM' not in metadata:\n raise UserError('TEAM missing from service metadata (service.json)')\n\n def set_default(k, v):\n if k not in metadata:\n metadata[k] = v\n\n set_default('REGION', 'eu-west-1')\n set_default('ACCOUNT_PREFIX', 'mmg')\n\n set_default('TYPE', 'docker')\n\n # can be explicitly None (null)\n set_default('DOMAIN', get_default_domain(component_name))\n set_default('DNS_NAME', None)\n if metadata['DNS_NAME'] is None and metadata['DOMAIN'] is not None:\n metadata['DNS_NAME'] = sub(r'-(?:service|subscriber|admin)$', '', component_name)\n\n set_default('ELBTYPE', 'internal')\n set_default('HEALTHCHECK_SUFFIX', '/internal/healthcheck')\n\n # release specific, but keeping in one place\n set_default('DOCKER_BUILD_DIR', '.')\n\n set_default('SLUG_BUILDER_DOCKER_OPTS', '')\n\n # deployment specific, but keeping in one place\n set_default('CONFIG_HANDLER', 'toml-inline')\n\n return metadata", "def set_studio_default(self):\n raise NotImplementedError(\n \"{} Method `set_studio_default` not implemented!\".format(\n repr(self)\n )\n )", "def test_defaults():\n config = Config(\n env_var='DO_NOT_USE',\n env_prefix='DO_NOT_USE',\n entry_point_name='DO_NOT_USE',\n )\n\n assert not config.keys()", "def test_set_defaults(self):\r\n self.assertEqual(self.config.values['option1'], 1337)\r\n self.assertNotIn('option2', self.config.values)", "def project():", "def project():", "def project():", "def workflow_default(c: Composition) -> None:\n\n # TODO: most of these should likely be converted to cluster tests\n\n for scenario in [pg_out_of_disk_space]:\n with (c.override(Postgres(volumes=[\"pgdata_512Mb:/var/lib/postgresql/data\"]))):\n print(f\"--- Running scenario {scenario.__name__} with limited disk\")\n initialize(c)\n scenario(c)\n end(c)\n\n for scenario in [\n disconnect_pg_during_snapshot,\n disconnect_pg_during_replication,\n restart_pg_during_snapshot,\n restart_mz_during_snapshot,\n restart_pg_during_replication,\n restart_mz_during_replication,\n fix_pg_schema_while_mz_restarts,\n verify_no_snapshot_reingestion,\n ]:\n print(f\"--- Running scenario {scenario.__name__}\")\n initialize(c)\n scenario(c)\n end(c)", "def test_operato_defaults(monkeypatch, tmpdir):\n monkeypatch.chdir(os.path.abspath(os.path.dirname(__file__)))\n\n output = cookiecutter(\n '.', no_input=True, output_dir=str(tmpdir), config_file='config.yaml'\n )\n\n assert output['list'] == 'cats'", "def setup(name = None, to_default = False):\n def default_val(item):\n return item.default if to_default else item.value\n def confirm_s(wanted, item):\n if not wanted or item.name.startswith(wanted):\n return confirm(item.question, default_val(item))\n return default_val(item)\n for p in items:\n p.value = confirm_s(name, p)\n\n # consistency checking\n check()\n\n for p in items:\n p.write_value()\n print 'Remember to invoke prepare-{} if noncached setting was changed'.format(\n 'noncached' if noncached.value else 'normal')", "def 
test_exactly_explicit_default_no_args_optional():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n multi_type: exactly\n count : 1\n required : false\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.SUCCESS.value\n assert TestCmdLine.test_opt == \"default-value\"", "def spread_default_parameters(config, dev_cfg):\n def_cfg = config.get('DEFAULT')\n if def_cfg is None:\n return\n\n for (key, value) in def_cfg.items():\n if key not in dev_cfg:\n dev_cfg[key] = value", "def bootstrap_default():\n\treturn default_configuration", "def voxel_env_override_defaults(env, parser):\n parser.set_defaults(\n encoder_type='conv',\n encoder_subtype='convnet_simple',\n hidden_size=512,\n obs_subtract_mean=0.0,\n obs_scale=255.0,\n actor_worker_gpus=[0],\n )", "def set_default_values_as_needed(self):\n if self.verbose:\n click.echo('Updating required default values')\n for field in ARGUMENTS_DEFAULT_VALUES:\n if self.__class__.__name__ in ARGUMENTS_DEFAULT_VALUES[field][1]:\n self.data[field] = ARGUMENTS_DEFAULT_VALUES[field][0]", "def f_default(self, default = 1) :\n pass", "def get_default_config(self):\n config = super(SlurmJobWasteCollector, self).get_default_config()\n config.update({\n 'path': 'waste'\n })\n return config", "def add_default_sim_arguments(self, skip_defaults: list = []):\n\n def no_skip(s): return s not in skip_defaults\n\n if no_skip('s') and no_skip('step_size') and no_skip('sim_step_size'):\n self.add_argument(\n \"-s\",\n \"--sim_step_size\",\n type=float,\n help=\"Simulation Step Size [s]\",\n default=3e-3,\n dest=\"step_size\",\n )\n\n if no_skip('rs') and no_skip('render_step_size'):\n self.add_argument(\n \"-rs\",\n \"--render_step_size\",\n type=float,\n help=\"Render Update Rate [Hz]\",\n default=1 / 10.0,\n )\n\n if no_skip('e') and no_skip('end_time'):\n self.add_argument(\n \"-e\",\n \"--end_time\",\n type=float,\n help=\"Simulation End Time [s]\",\n default=120,\n )\n\n # if no_skip('r') and no_skip('record'):\n # self.add_argument(\n # \"-r\",\n # \"--record\",\n # action=\"store_true\",\n # help=\"Record Simple State Data\",\n # default=False,\n # )", "def test_build_defaults_failure(self, args):\n sch = scheme.Scheme(*args)\n with pytest.raises(errors.InvalidSchemeError):\n sch.build_defaults()", "def _unset_defaults_and_overrides(self):\n for info, group in self._all_opt_infos():\n info.pop('default', None)\n info.pop('override', None)", "def set_default_values(args):\n if args.confidence_feature_path is None:\n args.confidence_feature_path = os.path.join(args.path, 'confidence_features.pkl')\n\n if args.e2e_dialogue_evaluation and args.val_batch_size[0] != 1:\n logger.warning('When evaluating dialogues end-to-end, val_batch_size should be 1 so we load the data turn by turn')\n args.val_batch_size = [1]", "def test_default_options(self):\r\n\r\n settings.ASSETS_URL_EXPIRE = True\r\n assert get_env().config['url_expire'] == settings.ASSETS_URL_EXPIRE\r\n\r\n settings.ASSETS_ROOT = 'FOO_ASSETS'\r\n settings.STATIC_ROOT = 'FOO_STATIC'\r\n settings.MEDIA_ROOT = 'FOO_MEDIA'\r\n # Pointing to ASSETS_ROOT\r\n assert get_env().directory.endswith('FOO_ASSETS')\r\n get_env().directory = 'BAR'\r\n assert settings.ASSETS_ROOT == 'BAR'\r\n # Pointing to STATIC_ROOT\r\n delsetting('ASSETS_ROOT')\r\n assert get_env().directory.endswith('FOO_STATIC')\r\n get_env().directory = 
'BAR'\r\n assert settings.STATIC_ROOT == 'BAR'\r\n # Pointing to MEDIA_ROOT; Note we only\r\n # set STATIC_ROOT to None rather than deleting\r\n # it, a scenario that may occur in the wild.\r\n settings.STATIC_ROOT = None\r\n assert get_env().directory.endswith('FOO_MEDIA')\r\n get_env().directory = 'BAR'\r\n assert settings.MEDIA_ROOT == 'BAR'", "def test_exactly_implicit_default_no_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n required : true\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.MISSING_MANDATORY_ARG.value" ]
[ "0.64410084", "0.63659227", "0.6084587", "0.60367554", "0.60328066", "0.59723306", "0.59047097", "0.5889502", "0.58557147", "0.5821773", "0.57834613", "0.5766115", "0.57598996", "0.57373494", "0.5727727", "0.57167864", "0.5711562", "0.5700834", "0.56967074", "0.567001", "0.5666175", "0.56373227", "0.56334627", "0.56085825", "0.5587259", "0.5573844", "0.5573844", "0.5567918", "0.556729", "0.5554447", "0.554786", "0.55451775", "0.55363566", "0.5524767", "0.55216753", "0.55158883", "0.5506005", "0.5496442", "0.5496442", "0.5477899", "0.5476631", "0.5472226", "0.5448224", "0.5437349", "0.54303426", "0.5417319", "0.54097104", "0.5391584", "0.5386332", "0.5384521", "0.5369346", "0.5366033", "0.53642595", "0.53601205", "0.53527373", "0.533164", "0.5323878", "0.53150606", "0.5313343", "0.5312111", "0.5306995", "0.5304942", "0.53022015", "0.53019726", "0.5288177", "0.5272698", "0.52708113", "0.52603096", "0.5251189", "0.52483916", "0.52483916", "0.52454185", "0.52420604", "0.5241602", "0.5240518", "0.5238843", "0.52356696", "0.52343285", "0.52287185", "0.52265114", "0.5213892", "0.52083343", "0.52083343", "0.52083343", "0.5199583", "0.5186242", "0.51862013", "0.5181331", "0.5170103", "0.5169704", "0.51664454", "0.5163813", "0.5158871", "0.5156893", "0.51552683", "0.5151697", "0.5150216", "0.5149847", "0.5139096", "0.5134161" ]
0.6023307
5
Should stop the step early and not continue running future steps
def test_stop_step_and_halt(self): support.create_project(self, 'homer') support.add_step(self, contents='\n'.join([ 'import cauldron as cd', 'cd.shared.test = 0', 'cd.step.breathe()', 'cd.shared.test = 1', 'cd.step.stop(halt=True)', 'cd.shared.test = 2' ])) support.add_step(self, contents='\n'.join([ 'import cauldron as cd', 'cd.shared.test = 3' ])) support.run_command('run') project = cd.project.get_internal_project() step = project.steps[1] self.assertEqual(project.shared.fetch('test'), 1) self.assertNotEqual(-1, step.dom.find('cd-StepStop'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_step(self) -> None:", "def test_step_stop_aborted(self, _step: PropertyMock):\n _step.return_value = None\n es = exposed.ExposedStep()\n es.stop()", "def _step(self) -> None:", "def _step(self):\n pass", "def step(self):\n\n pass", "def test_stop_step_no_halt(self):\n support.create_project(self, 'homer2')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.shared.other = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop()',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.other = 1'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertEqual(project.shared.fetch('other'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def run_one_step(self):\n pass", "def pre_stop(self):", "def run_skip(self):\n pass", "def halt(*_, **kwargs):\n raise ExecutionFinished(\"Reached halt\")", "def _step(self):\n title()\n self.runCount = 1\n self.experiment.pause = False\n self._runExperiment()\n self.pause = True", "def after_step():\n raise NotImplementedError", "def perform_step(self) -> None:\n pass", "def _step(self, whence):\n pass", "def _prepare_to_stop(self):\n pass", "def stopTestRun(self):", "def step(self, **kwargs):\n pass", "def step(self, action):", "def step(self):\n while self.state != STATE_TERMINAL:\n self.step_strategies[self.state]()", "def TestOneStep(self):\n pass", "def test_stop_step_silent(self):\n contents = '\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop(silent=True)',\n 'cd.shared.test = 2'\n ])\n\n support.create_project(self, 'homeritis')\n support.add_step(self, contents=contents)\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[0]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertEqual(-1, step.dom.find('cd-StepStop'))", "def step(self):\n self.function()", "def stopCond(self):\n\t\treturn False", "def endOfTestcase(self):\n pass # nothing to do here. 
Hence pass statement is called.", "def stop() -> None:", "def continue_running(self, method):", "def post_stop(self):", "def step(self):\r\n raise NotImplementedError", "def step(self):\n self.driver.step()", "def step(self) -> bool:\n raise NotImplementedError()", "def need_stop(self, path):", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError", "def step(self, step=None):\n pass", "def test_long_run_case_that_we_want_to_skip():\n time.sleep(30)\n assert 0", "def step_async(self, actions):", "def command_continue(self):\n self.step_continue = True", "def _gracefully_stop(self):\n pass", "def stop_check(self):\n pass", "def step(self):\n raise TaskError(\"Task %s: subclass should override step() method!\" %\n self)", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def finish(self):\n self.check_required()\n while self.options.next_breakpoint:\n self.options.shift_breakpoint()\n self.arguments = self.options.get_arguments()\n self.check_required()", "def step_forward(self):", "def pre_step(self,status):\n self.t0 = time.time()\n pass", "def stop(self) -> None:", "def stop(self) -> None:", "def Continue():\n # adjust this to take as many steps as you need\n return warp.top.it <= 500", "def stop(self):\n self.halt = True", "def stop():\n raise StopIteration", "def stepFinished(build, step, results):", "def run_starter(self, expect_to_fail=False):", "def try_advance(self):\n if not self.step.toclick:\n self.step.finished = True\n return True\n return False", "def stop(self):\n self._should_run = False", "def stop(self):\n self.finished = True", "def next_step(self):\n self.proceed()\n self.execute_current()", "def resumeTests(self):\n self.setState('running')\n self.after(100, self.runOneTest)\n return", "def stop():", "def stop():", "def stop():", "def stop():", "def _stop(self):\n return True", "def end_phase():\n pass", "def complete_run():\n pass", "def _run(self):\n logging.warning('-> perform EMPTY experiment...')", "def test_stop_resume(self):\n self.create_sample_data_set_dir(\"node59p1_step1.dat\", TELEM_DIR, \"node59p1.dat\",\n copy_metadata=False)\n driver_config = self._driver_config()['startup_config']\n sio_mule_config = driver_config['harvester'][DataSourceKey.PHSEN_ABCDEF_SIO_MULE]\n fullfile = os.path.join(sio_mule_config['directory'], sio_mule_config['pattern'])\n mod_time = os.path.getmtime(fullfile)\n\n # Create and store the new driver state\n self.memento = {DataSourceKey.PHSEN_ABCDEF_SIO_MULE: {\n \"node59p1.dat\": {\n DriverStateKey.FILE_SIZE: 911,\n DriverStateKey.FILE_CHECKSUM: '8b7cf73895eded0198b3f3621f962abc',\n DriverStateKey.FILE_MOD_DATE: mod_time,\n DriverStateKey.PARSER_STATE: {\n StateKey.IN_PROCESS_DATA: [],\n StateKey.UNPROCESSED_DATA:[[0, 172]],\n StateKey.FILE_SIZE: 911\n }\n }\n }}\n\n self.driver = self._get_driver_object(memento=self.memento)\n\n # create some data to parse\n self.clear_async_data()\n self.create_sample_data_set_dir(\"node59p1_step2.dat\", TELEM_DIR, \"node59p1.dat\",\n copy_metadata=False)\n\n self.driver.start_sampling()\n\n # verify data is produced\n self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',\n count=2, timeout=10)", "def bail(self, msg):\n self.logger.error(\"Can't handle follower result: {}\".format(msg))\n self.call('ctrl', 'stop_full')\n sys.exit(1)", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = 
False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def test_correctly_sets_halt_on_next(self, get_pipe_manager):\n\n # Establish manager and perform initial control assertions.\n pm = get_pipe_manager(name=\"TestPM\")\n pm.stop_after = \"step2\"\n assert not pm.halt_on_next\n\n # Make non-halt-status-altering checkpointed timestamp call and\n # verify that we're still running and that we're not scheduled to halt.\n pm.timestamp(checkpoint=\"step1\")\n assert not pm.halt_on_next\n\n # Make halt-status-altering checkpointed timestamp call and verify\n # that we're still running and that we've now been scheduled to halt.\n pm.timestamp(checkpoint=\"step2\")\n assert pm.halt_on_next", "def step(self):\n #1. Time progresses\n self.time_operator.step()\n \n #2. Form and dissolve relationships\"\n self.relationship_operator.step()\n\n #3. HIV transmission\n self.infection_operator.step()", "def step(self):\n try:\n self.tiempos.siguiente()\n except StopIteration:\n return", "def _maybe_stop_iteration(self, global_step, batch_count):\n if batch_count == self.STOP_BATCH_COUNT_PER_EPOCH:\n warnings.warn(\n \"The memory benchmark runner performs only \"\n + f\"{self.STOP_BATCH_COUNT_PER_EPOCH} steps per epoch.\"\n )\n raise StopIteration", "def _stop(self):", "def step(self, state):", "def step(self, action):\n pass", "def step(self, action):\n pass", "def stepStarted(build, step):", "def aborting(self):\n \n pass", "def run_out_of_time(self):\n self.out_of_time = True", "def stop(self):\r\n self.terminating = True", "def run_job(job, interrupt_if_necessary):", "def test_stop_resume(self):\n \n self.create_sample_data_set_dir(\n \"node59p1_step1.dat\",\n TELEM_DIR,\n \"node59p1.dat\",\n copy_metadata=False\n )\n \n # create the recovered file\n self.create_sample_data_set_dir(\n \"DOS15908_1st7_step1.DAT\",\n RECOV_DIR,\n \"DOS15908.DAT\",\n copy_metadata=False\n )\n \n # create some data to parse\n self.clear_async_data()\n \n self.driver.start_sampling()\n\n # verify data is produced\n self.assert_data(\n DostadParserTelemeteredDataParticle,\n 'test_data_1_ss1.txt.result.yml',\n count=1,\n timeout=10\n )\n self.assert_data(\n DostadParserRecoveredDataParticle,\n 'test_data_1r_ss1.txt.result.yml',\n count=1,\n timeout=10\n )\n\n self.driver.stop_sampling()\n\n self.driver.start_sampling()\n \n self.assert_data(\n DostadParserTelemeteredMetadataDataParticle,\n 'test_data_1_ss2.txt.result.yml',\n count=1,\n timeout=10\n )\n self.assert_data(\n DostadParserRecoveredMetadataDataParticle,\n 'test_data_1r_ss2.txt.result.yml',\n count=1,\n timeout=10\n )", "def foreceStop(self):\n self.__success = False\n self.stop()", "def train_loop_post(self, current_step):\r\n pass", "def step(self, action):\n assert self.action_space.contains(action), \"%r (%s) invalid\"%(action, type(action))\n self.microgridPolicy.improveAction(action);\n\n self.microgrid.update();\n\n self.updateState();\n done = self.microgridPolicy.verifyStopConditions();\n reward = self.microgridPolicy.computeReward(done)\n if done: \n if self.steps_beyond_done is None:\n self.steps_beyond_done = 0\n else:\n logger.warn(\"You are calling 'step()' even though this environment has already returned done = True. 
You should always call 'reset()' once you receive 'done = True' -- any further steps are undefined behavior.\")\n self.steps_beyond_done += 1\n self.clock.increaseTimeStep();\n return self.state, reward, done, {}", "def _waitForGoSignal(self):\n while not self.goSignal:\n if self.exp.doFinish:\n raise exception.EarlyFinish\n\n if self.exp.doAbort:\n raise exception.ExposureAborted\n\n pfsTime.sleep.millisec()", "def run(self):\n sys.exit(-1)", "def step(self):\n # Fast learning\n task_embedding = self._ilp.infer_task()\n\n # Posterior update\n #self._skip_flag = self._is_graph_same(task_embedding, self._prev_task_embedding)\n self._skip_flag = False # XXX do not skip test\n if not self._skip_flag:\n self._grprop.observe_task(task_embedding)\n self._prev_task_embedding = task_embedding\n else:\n print(\"skipping!\")", "def stop(self):\n return", "def step_solution(self):\n import time, random\n time.sleep(1.0)\n print '(step_solution) Implement me!'\n return True if random.random() < 0.25 else False", "def stop(self) -> None:\n ...", "def test_run_ended(self):", "def nanny(self): \n while not self.started and not self.failed:\n eventlet.sleep(.1)\n return not self.failed", "def step_impl(context):\n pass" ]
[ "0.7293106", "0.69339883", "0.68969584", "0.68435454", "0.6819655", "0.68091816", "0.6744681", "0.6729426", "0.67282057", "0.6633512", "0.6604878", "0.6572258", "0.65700793", "0.6551742", "0.65375674", "0.6494899", "0.6462652", "0.6412046", "0.6402601", "0.63951445", "0.6392409", "0.6380867", "0.6371049", "0.63612694", "0.63604355", "0.6360419", "0.63558966", "0.63521266", "0.63433516", "0.63274986", "0.63036776", "0.6303028", "0.6303028", "0.6303028", "0.6301328", "0.62928236", "0.62638766", "0.6258656", "0.625147", "0.62402487", "0.6237678", "0.6226744", "0.62144506", "0.6208431", "0.61811733", "0.61758566", "0.61702967", "0.61702967", "0.6168642", "0.6165214", "0.6148807", "0.6133623", "0.6125649", "0.6112182", "0.61118513", "0.6110816", "0.6110157", "0.61099327", "0.61048925", "0.61048925", "0.61048925", "0.61048925", "0.6100513", "0.6097909", "0.609753", "0.60823584", "0.60802597", "0.6076767", "0.607496", "0.607496", "0.607496", "0.607496", "0.607496", "0.607496", "0.6074337", "0.6073528", "0.6070607", "0.6068958", "0.60686874", "0.60646105", "0.60577023", "0.60577023", "0.60376805", "0.60325754", "0.60020113", "0.59987426", "0.5996978", "0.59945273", "0.5993728", "0.5980396", "0.5976863", "0.59727645", "0.59705055", "0.5970373", "0.59658957", "0.59630764", "0.5962527", "0.5962497", "0.59395653", "0.59339696" ]
0.64876556
16
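The record above pairs its query with a Cauldron test built around cd.step.stop(halt=True). As a minimal usage sketch inferred only from that test's calls and assertions (not from Cauldron's documentation), a halting stop ends the current step and prevents every later step from running:

    import cauldron as cd

    cd.shared.test = 1
    cd.step.stop(halt=True)  # ends this step and halts the whole run
    cd.shared.test = 2       # never executes; later steps never run either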
Should stop the step early and not continue running future steps because the project was halted.
def test_stop_project(self): support.create_project(self, 'homer3') support.add_step(self, contents='\n'.join([ 'import cauldron as cd', 'cd.shared.test = 0', 'cd.step.breathe()', 'cd.shared.test = 1', 'cd.project.stop()', 'cd.shared.test = 2' ])) support.add_step(self, contents='\n'.join([ 'import cauldron as cd', 'cd.shared.test = 3' ])) support.run_command('run') project = cd.project.get_internal_project() step = project.steps[1] self.assertEqual(project.shared.fetch('test'), 1) self.assertNotEqual(-1, step.dom.find('cd-StepStop'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_stop_step_no_halt(self):\n support.create_project(self, 'homer2')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.shared.other = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop()',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.other = 1'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertEqual(project.shared.fetch('other'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def halt(*_, **kwargs):\n raise ExecutionFinished(\"Reached halt\")", "def test_stop_step_and_halt(self):\n support.create_project(self, 'homer')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop(halt=True)',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 3'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def test_step_stop_aborted(self, _step: PropertyMock):\n _step.return_value = None\n es = exposed.ExposedStep()\n es.stop()", "def test_stop_step_silent(self):\n contents = '\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop(silent=True)',\n 'cd.shared.test = 2'\n ])\n\n support.create_project(self, 'homeritis')\n support.add_step(self, contents=contents)\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[0]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertEqual(-1, step.dom.find('cd-StepStop'))", "def do_step(self) -> None:", "def _step(self):\n title()\n self.runCount = 1\n self.experiment.pause = False\n self._runExperiment()\n self.pause = True", "def run_skip(self):\n pass", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def _prepare_to_stop(self):\n pass", "def stop(self):\n self.halt = True", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stop(self):\n self._run_flag = False\n self.wait()", "def stopTestRun(self):", "def stopCond(self):\n\t\treturn False", "def run_one_step(self):\n pass", "def pre_stop(self):", "def step(self):\n\n pass", "def _gracefully_stop(self):\n pass", "def stop(self):\n self._should_run = False", "def resumeTests(self):\n self.setState('running')\n self.after(100, self.runOneTest)\n return", "def command_continue(self):\n self.step_continue = True", "def need_stop(self, path):", "def _step(self):\n pass", "def stop() -> None:", "def stopBuild(reason=\"<no reason given>\"):", "def halt(self, _):\n self.execution_manager.is_submission_enabled = False\n self.menu_structure['terminate'] = ('main', [('Continue submitting jobs', self.enable_submission)])\n self.__back_to_main()", "def _step(self) -> None:", "def stop(self):\n self.finished = True", "def halt():\n\n jobs = [j.name for j in config.all_jobs]\n nomad.stop_and_wait(jobs)", "def 
abort(self):\n try:\n self.acqRunning = False\n except:\n print('Cannot abort properly')", "def Halt(self):\n self.run_halt = True", "def foreceStop(self):\n self.__success = False\n self.stop()", "def finish(self):\n self.check_required()\n while self.options.next_breakpoint:\n self.options.shift_breakpoint()\n self.arguments = self.options.get_arguments()\n self.check_required()", "def _stop(self):\n return True", "def exit(self):\n self.runtime.halted = True", "async def stop(self) -> None:\n with STOP_BLUEPRINT.as_task(name=self.name):\n self._change_blueprint_state(BlueprintState.TERMINATING)\n\n try:\n for steps in reversed(self.execution_order):\n stop_steps = [step for step in steps if hasattr(step, \"stop\")]\n if stop_steps:\n NEXT_BOOTSTEPS.log(name=self.name, next_bootsteps=stop_steps)\n async with trio.open_nursery() as nursery:\n for step in stop_steps:\n _apply_step(nursery, step.stop)\n except Exception as e:\n self._change_blueprint_state((BlueprintState.FAILED, e))\n raise\n else:\n self._change_blueprint_state(BlueprintState.TERMINATED)", "def aborting(self):\n \n pass", "def stop(self):\n self._run = False", "def endOfTestcase(self):\n pass # nothing to do here. Hence pass statement is called.", "def stop(self):\r\n self.terminating = True", "def stop_fixture(self):\n pass", "def step(self):\n raise TaskError(\"Task %s: subclass should override step() method!\" %\n self)", "def run(self):\n sys.exit(-1)", "def halt_cmd(ctx):\n pass", "def set_continue(self):\n # Don't stop except at breakpoints or when finished\n self._set_stopinfo(self.botframe, None, -1)\n if not self.breaks:\n # no breakpoints; run without debugger overhead\n sys.settrace(None)\n frame = sys._getframe().f_back\n while frame and frame is not self.botframe:\n del frame.f_trace\n frame = frame.f_back", "def perform_step(self) -> None:\n pass", "def post_stop(self):", "def step(self):\n self.driver.step()", "def stop(self) -> None:", "def stop(self) -> None:", "def stop_check(self):\n pass", "def stepStarted(build, step):", "def step(self, **kwargs):\n pass", "def on_run_clicked(self):\n self.start_threading()\n self.stepping = False\n self.step_event.set()", "def stop(self) -> None:\n ...", "def eStop(self):\n Step(speed=0, coils=1, steps=0, dir=Step.BRAKE)\n # #######################################################\n # Need to blink Stop and wait until Stop is pressed again\n # #######################################################", "def run_starter(self, expect_to_fail=False):", "def exit_engine(self):\n self.stop_flag = True", "def halt(self):\n\n print(\"Halt program. 
Exit emulator.\")\n self.running = False\n sys.exit()", "def stop(self):\n return", "def stop(self):\n self.exit.set()", "def stop(self):\n self.exit.set()", "def TestOneStep(self):\n pass", "def stop_running_phase(self) -> None:\n self.running_phase_state = None", "def continue_running(self, method):", "def after_step():\n raise NotImplementedError", "def fatal_error_processor(self):\n while True:\n _ = (yield)\n self.failed = True\n self.converged = False\n self.solve_completed = False", "def step(self) -> bool:\n raise NotImplementedError()", "def bail(self, msg):\n self.logger.error(\"Can't handle follower result: {}\".format(msg))\n self.call('ctrl', 'stop_full')\n sys.exit(1)", "def stop(self):\n return self.setup.stop", "def stop():", "def stop():", "def stop():", "def stop():", "def stopclean(self):\n raise Exception(\"Not implemented\")", "def test_long_run_case_that_we_want_to_skip():\n time.sleep(30)\n assert 0", "def _run(self):\n logging.warning('-> perform EMPTY experiment...')", "def halted(self) -> bool:\n raise NotImplementedError(\"halted not implemented.\")", "def _stop(self):", "def _step(self, whence):\n pass", "def abort(self, message: str) -> None:\n message = f\"{Invocation.current.log} - {message}\"\n self.exception = StepException(message)\n global failure_aborts_build # pylint: disable=invalid-name\n global no_actions # pylint: disable=invalid-name\n if failure_aborts_build.value and not no_actions.value:\n no_additional_complaints()\n raise self.exception", "def test_correctly_sets_halt_on_next(self, get_pipe_manager):\n\n # Establish manager and perform initial control assertions.\n pm = get_pipe_manager(name=\"TestPM\")\n pm.stop_after = \"step2\"\n assert not pm.halt_on_next\n\n # Make non-halt-status-altering checkpointed timestamp call and\n # verify that we're still running and that we're not scheduled to halt.\n pm.timestamp(checkpoint=\"step1\")\n assert not pm.halt_on_next\n\n # Make halt-status-altering checkpointed timestamp call and verify\n # that we're still running and that we've now been scheduled to halt.\n pm.timestamp(checkpoint=\"step2\")\n assert pm.halt_on_next", "def pause(self):\n if self._pause:\n self._pause = False\n else:\n self._pause = True\n self.step() # trigger the next step", "def after_step(context, step):\n if context.config.userdata.getbool(\"debug\") and step.status == \"failed\":\n spost_mortem(step.exc_traceback)", "def _maybe_stop_iteration(self, global_step, batch_count):\n if batch_count == self.STOP_BATCH_COUNT_PER_EPOCH:\n warnings.warn(\n \"The memory benchmark runner performs only \"\n + f\"{self.STOP_BATCH_COUNT_PER_EPOCH} steps per epoch.\"\n )\n raise StopIteration", "def execute(self):\n\t\tself.drivetrain.bad_auto_drive()\n\t\tself.drivetrain.stop_robot()", "def step(self, step=None):\n pass", "def stop(self):\r\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass" ]
[ "0.71432555", "0.69802755", "0.69084686", "0.68055534", "0.6585818", "0.65606755", "0.6539221", "0.6507136", "0.6481271", "0.64599055", "0.64539194", "0.64235467", "0.64235467", "0.64235467", "0.64235467", "0.64235467", "0.64235467", "0.6410082", "0.63917327", "0.6325879", "0.6317153", "0.6310043", "0.62836003", "0.62513673", "0.62508667", "0.62307024", "0.6219371", "0.6213522", "0.62090683", "0.6171041", "0.61548007", "0.6146886", "0.61400986", "0.61389023", "0.6135257", "0.6134586", "0.6098995", "0.6092722", "0.60768676", "0.607684", "0.60720974", "0.60702044", "0.6059681", "0.60542405", "0.6049748", "0.6048702", "0.60415715", "0.6040509", "0.60386944", "0.6035671", "0.60231185", "0.6022652", "0.59979594", "0.5996862", "0.5996862", "0.59879714", "0.5980037", "0.5973085", "0.5959017", "0.595868", "0.59538835", "0.5948725", "0.59440255", "0.59430724", "0.5936149", "0.5922985", "0.5922985", "0.59213114", "0.5907307", "0.5905208", "0.5902345", "0.5898143", "0.58911693", "0.5890338", "0.58897704", "0.58742046", "0.58742046", "0.58742046", "0.58742046", "0.58648103", "0.58584285", "0.5858345", "0.585562", "0.58521646", "0.5850054", "0.5845707", "0.583958", "0.58380127", "0.5831146", "0.58310455", "0.5827384", "0.5825701", "0.58235806", "0.58218837", "0.58218837", "0.58218837", "0.58218837", "0.58218837", "0.58218837", "0.58218837" ]
0.6601984
4
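This record tests the project-level variant, cd.project.stop(), rather than a step-level stop. A sketch of the behavior the test asserts — the shared-value names here are illustrative, not part of the library:

    import cauldron as cd

    cd.shared.test = 1
    cd.project.stop()   # stops the current step and the remainder of the project run
    cd.shared.test = 2  # unreachable; the test confirms a following step is skipped too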
Should stop the step early but continue running future steps
def test_stop_step_no_halt(self): support.create_project(self, 'homer2') support.add_step(self, contents='\n'.join([ 'import cauldron as cd', 'cd.shared.test = 0', 'cd.shared.other = 0', 'cd.step.breathe()', 'cd.shared.test = 1', 'cd.step.stop()', 'cd.shared.test = 2' ])) support.add_step(self, contents='\n'.join([ 'import cauldron as cd', 'cd.shared.other = 1' ])) support.run_command('run') project = cd.project.get_internal_project() step = project.steps[1] self.assertEqual(project.shared.fetch('test'), 1) self.assertEqual(project.shared.fetch('other'), 1) self.assertNotEqual(-1, step.dom.find('cd-StepStop'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_step(self) -> None:", "def _step(self) -> None:", "def step(self):\n\n pass", "def _step(self):\n pass", "def run_one_step(self):\n pass", "def perform_step(self) -> None:\n pass", "def step(self):\n while self.state != STATE_TERMINAL:\n self.step_strategies[self.state]()", "def next_step(self):\n self.proceed()\n self.execute_current()", "def _step(self):\n title()\n self.runCount = 1\n self.experiment.pause = False\n self._runExperiment()\n self.pause = True", "def _step(self, whence):\n pass", "def step(self, **kwargs):\n pass", "def step(self):\n self.driver.step()", "def command_continue(self):\n self.step_continue = True", "def step(self):\n self.function()", "def step(self, action):", "def test_step_stop_aborted(self, _step: PropertyMock):\n _step.return_value = None\n es = exposed.ExposedStep()\n es.stop()", "def after_step():\n raise NotImplementedError", "def continue_running(self, method):", "def run_skip(self):\n pass", "def finish(self):\n self.check_required()\n while self.options.next_breakpoint:\n self.options.shift_breakpoint()\n self.arguments = self.options.get_arguments()\n self.check_required()", "def step(self):\r\n raise NotImplementedError", "def halt(*_, **kwargs):\n raise ExecutionFinished(\"Reached halt\")", "def step_forward(self):", "def step(self):\n raise NotImplementedError", "def step(self) -> bool:\n raise NotImplementedError()", "def step_async(self, actions):", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n #1. Time progresses\n self.time_operator.step()\n \n #2. Form and dissolve relationships\"\n self.relationship_operator.step()\n\n #3. HIV transmission\n self.infection_operator.step()", "def step(self, step=None):\n pass", "def endOfTestcase(self):\n pass # nothing to do here. 
Hence pass statement is called.", "def try_advance(self):\n if not self.step.toclick:\n self.step.finished = True\n return True\n return False", "def step(self):\n try:\n self.tiempos.siguiente()\n except StopIteration:\n return", "def resumeTests(self):\n self.setState('running')\n self.after(100, self.runOneTest)\n return", "def Continue():\n # adjust this to take as many steps as you need\n return warp.top.it <= 500", "def pre_stop(self):", "def step(self):\n raise TaskError(\"Task %s: subclass should override step() method!\" %\n self)", "def TestOneStep(self):\n pass", "def test_stop_step_and_halt(self):\n support.create_project(self, 'homer')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop(halt=True)',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 3'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def _run_next_state(self):\n if self.state != \"STOP\":\n self.state = self.get_state_info(\"next\")\n self._run_state()", "def step(self):\n self.schedule.step()", "def stepFinished(build, step, results):", "def train_loop_post(self, current_step):\r\n pass", "def step(self, state):", "def step(self):\n self.latent.step()", "def run(self, steps=1000):\n for step in range(steps):\n if self.is_done():\n return\n self.step()", "def run(self, steps=1000):\n for step in range(steps):\n if self.is_done():\n return\n self.step()", "def _prepare_to_stop(self):\n pass", "def step(self, action):\n pass", "def step(self, action):\n pass", "def step(self, action):\n assert self.action_space.contains(action), \"%r (%s) invalid\"%(action, type(action))\n self.microgridPolicy.improveAction(action);\n\n self.microgrid.update();\n\n self.updateState();\n done = self.microgridPolicy.verifyStopConditions();\n reward = self.microgridPolicy.computeReward(done)\n if done: \n if self.steps_beyond_done is None:\n self.steps_beyond_done = 0\n else:\n logger.warn(\"You are calling 'step()' even though this environment has already returned done = True. 
You should always call 'reset()' once you receive 'done = True' -- any further steps are undefined behavior.\")\n self.steps_beyond_done += 1\n self.clock.increaseTimeStep();\n return self.state, reward, done, {}", "def run(self, steps = 1000):\n for step in range(steps):\n if self.is_done():\n return\n self.step()", "def run_step(self):\n self.control_instance.run_step()", "def proceed(self):\n pass", "def _waitForGoSignal(self):\n while not self.goSignal:\n if self.exp.doFinish:\n raise exception.EarlyFinish\n\n if self.exp.doAbort:\n raise exception.ExposureAborted\n\n pfsTime.sleep.millisec()", "def stop():\n raise StopIteration", "def step(self):\n # Fast learning\n task_embedding = self._ilp.infer_task()\n\n # Posterior update\n #self._skip_flag = self._is_graph_same(task_embedding, self._prev_task_embedding)\n self._skip_flag = False # XXX do not skip test\n if not self._skip_flag:\n self._grprop.observe_task(task_embedding)\n self._prev_task_embedding = task_embedding\n else:\n print(\"skipping!\")", "def pre_step(self,status):\n self.t0 = time.time()\n pass", "async def run(self) -> Optional[BaseException]: # pylint: disable=too-many-branches,too-many-statements\n active = Invocation.active.get(self.name)\n if active is not None:\n return await self.done(self.wait_for(active))\n\n self._become_current()\n Logger.trace(\"Call\")\n\n global rebuild_changed_actions # pylint: disable=invalid-name\n if rebuild_changed_actions.value:\n self.new_persistent_actions.append(PersistentAction())\n self.read_old_persistent_actions()\n\n assert self.name not in Invocation.active\n Invocation.active[self.name] = self\n self.collect_initial_outputs()\n\n try:\n assert self.step is not None\n try:\n await self.done(self.step.function(**self.kwargs))\n except RestartException:\n self._restart()\n await self.done(self.step.function(**self.kwargs))\n await self.done(self.sync())\n await self.done(self.collect_final_outputs())\n\n except StepException as exception: # pylint: disable=broad-except\n self.exception = exception\n\n finally:\n self._become_current()\n\n if self.exception is None:\n assert not self.async_actions\n if self.new_persistent_actions:\n if len(self.new_persistent_actions) > 1 and self.new_persistent_actions[-1].is_empty():\n self.new_persistent_actions.pop()\n\n if not self.did_skip_actions:\n self.write_new_persistent_actions()\n elif len(self.new_persistent_actions) < len(self.old_persistent_actions):\n Logger.warning(\"Skipped some action(s) \" \"even though changed to remove some final action(s)\")\n\n if self.did_run_actions:\n Logger.trace(\"Done\")\n elif self.did_skip_actions:\n Logger.trace(\"Skipped\")\n else:\n Logger.trace(\"Complete\")\n\n else:\n while self.async_actions:\n try:\n await self.done(self.async_actions.pop())\n except StepException:\n pass\n if self.did_run_actions:\n self.poison_all_outputs()\n self.remove_old_persistent_data()\n if not isinstance(self.exception, DryRunException):\n Logger.trace(\"Fail\")\n\n del Invocation.active[self.name]\n if self.condition is not None:\n await self.done(self.condition.acquire())\n self.condition.notify_all()\n self.condition.release()\n\n global failure_aborts_build # pylint: disable=invalid-name\n if self.exception is not None and failure_aborts_build.value:\n no_additional_complaints()\n raise self.exception\n\n return self.exception", "def complete_run():\n pass", "def step(self): \n self.reset_parameters()\n\n if np.random.uniform(0, 1) < self.model.churn_prob: self.exit_triggered = True \n if 
self.exit_triggered:\n self.exit()\n else:\n self.register_deposit(self.deposit_intent)\n self.register_contribution(self.contribution_intent)\n self.register_sponsorship(self.sponsor_intent)\n self.register_euro_exchange(self.euro_exchange_intent)\n self.register_teo_exchange(self.teo_exchange_intent)\n self.register_withdraw(self.withdraw_intent)", "def proceed(self):\n if self.current_step is None or self.step_position == StepPosition.Before:\n return\n\n for condition, transition in self.current_step.conditions:\n if condition.satisfied():\n new_proc = transition.procedure\n self.current_procedure_id = new_proc\n self.current_step = self._suite[new_proc].steps[transition.step]\n self.step_position = StepPosition.Before\n break", "def step(self, actions):\n self.step_async(actions)\n return self.step_wait()", "def step(self, actions):\n self.step_async(actions)\n return self.step_wait()", "def test_stop_resume(self):\n self.create_sample_data_set_dir(\"node59p1_step1.dat\", TELEM_DIR, \"node59p1.dat\",\n copy_metadata=False)\n driver_config = self._driver_config()['startup_config']\n sio_mule_config = driver_config['harvester'][DataSourceKey.PHSEN_ABCDEF_SIO_MULE]\n fullfile = os.path.join(sio_mule_config['directory'], sio_mule_config['pattern'])\n mod_time = os.path.getmtime(fullfile)\n\n # Create and store the new driver state\n self.memento = {DataSourceKey.PHSEN_ABCDEF_SIO_MULE: {\n \"node59p1.dat\": {\n DriverStateKey.FILE_SIZE: 911,\n DriverStateKey.FILE_CHECKSUM: '8b7cf73895eded0198b3f3621f962abc',\n DriverStateKey.FILE_MOD_DATE: mod_time,\n DriverStateKey.PARSER_STATE: {\n StateKey.IN_PROCESS_DATA: [],\n StateKey.UNPROCESSED_DATA:[[0, 172]],\n StateKey.FILE_SIZE: 911\n }\n }\n }}\n\n self.driver = self._get_driver_object(memento=self.memento)\n\n # create some data to parse\n self.clear_async_data()\n self.create_sample_data_set_dir(\"node59p1_step2.dat\", TELEM_DIR, \"node59p1.dat\",\n copy_metadata=False)\n\n self.driver.start_sampling()\n\n # verify data is produced\n self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',\n count=2, timeout=10)", "def stopTestRun(self):", "def step(self):\n if not self.is_done():\n actions = [ agent.program(self.percept(agent)) for agent in self.agents ]\n for agent, action in zip(self.agents, actions):\n self.execute_action(agent, action)\n\n self.exogenous_change()", "def post_stop(self):", "def test_stop_step_silent(self):\n contents = '\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop(silent=True)',\n 'cd.shared.test = 2'\n ])\n\n support.create_project(self, 'homeritis')\n support.add_step(self, contents=contents)\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[0]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertEqual(-1, step.dom.find('cd-StepStop'))", "def stop() -> None:", "def test_long_run_case_that_we_want_to_skip():\n time.sleep(30)\n assert 0", "def checkStep(rc, steps, run_status, prog_args):\n\n if (rc == FAILURE) or (rc == EXCEPTION):\n buildException(run_status, 'previous command failed')\n else:\n defer.maybeDeferred(lambda x: startNextStep(x,\n run_status, prog_args), steps)", "def run(self, p):\n while self.state < 3:\n self.__step(p)", "def run(self):\n\n while not self.__done:\n self.single_cycle()\n\n \"\"\"\n while not self.__done:\n self.step()\n self.debug()\n \"\"\"", "def step_button(self):\n if self.run_button=='Stop':\n return\n\n max_res_steps = 
self.params['max_res_steps']\n max_school_steps = self.params['max_school_steps']\n school_time = self.model.scheduler.get_time('school')\n residential_time = self.model.scheduler.get_time('residential')\n\n # If residential is not converged yet or below max steps, do a step\n if (residential_time < max_res_steps and not self.model.res_ended):\n self.model.step(residential=True)\n self.model.res_ended = self.model.convergence_check()\n else:\n self.model.res_ended = True\n\n # Initial school step needs to be executed\n if (school_time == 0 and self.model.res_ended):\n self.residential = False\n self.model.step(residential=False, initial_schools=True)\n self.model.convergence_check()\n\n # Normal school steps\n elif (school_time < max_school_steps and self.model.res_ended and not self.model.school_ended):\n self.model.step(residential=False, initial_schools=False)\n\n if self.model.convergence_check():\n self.model.school_ended = True\n print('School process ended.')\n\n # Both processes are done/converged\n if (self.model.res_ended and self.model.school_ended):\n self.run_button()\n return\n\n self.update_data()", "def train_loop_pre(self, current_step):\r\n pass", "def step(self, action):\n raise NotImplementedError", "def next(self):\n while not self.is_stable():\n self.step()", "def step(self, action):\n raise NotImplementedError()", "def step_solution(self):\n import time, random\n time.sleep(1.0)\n print '(step_solution) Implement me!'\n return True if random.random() < 0.25 else False", "def on_step_clicked(self):\n self.start_threading()\n self.stepping = True\n self.step_event.set()", "def on_run_clicked(self):\n self.start_threading()\n self.stepping = False\n self.step_event.set()", "def bail(self, msg):\n self.logger.error(\"Can't handle follower result: {}\".format(msg))\n self.call('ctrl', 'stop_full')\n sys.exit(1)", "def run_starter(self, expect_to_fail=False):", "def run_job(job, interrupt_if_necessary):", "def stepStarted(build, step):", "def pause(self):\n if self._pause:\n self._pause = False\n else:\n self._pause = True\n self.step() # trigger the next step", "def test_correctly_sets_halt_on_next(self, get_pipe_manager):\n\n # Establish manager and perform initial control assertions.\n pm = get_pipe_manager(name=\"TestPM\")\n pm.stop_after = \"step2\"\n assert not pm.halt_on_next\n\n # Make non-halt-status-altering checkpointed timestamp call and\n # verify that we're still running and that we're not scheduled to halt.\n pm.timestamp(checkpoint=\"step1\")\n assert not pm.halt_on_next\n\n # Make halt-status-altering checkpointed timestamp call and verify\n # that we're still running and that we've now been scheduled to halt.\n pm.timestamp(checkpoint=\"step2\")\n assert pm.halt_on_next", "def fatal_error_processor(self):\n while True:\n _ = (yield)\n self.failed = True\n self.converged = False\n self.solve_completed = False", "def test_stop_resume(self):\n \n self.create_sample_data_set_dir(\n \"node59p1_step1.dat\",\n TELEM_DIR,\n \"node59p1.dat\",\n copy_metadata=False\n )\n \n # create the recovered file\n self.create_sample_data_set_dir(\n \"DOS15908_1st7_step1.DAT\",\n RECOV_DIR,\n \"DOS15908.DAT\",\n copy_metadata=False\n )\n \n # create some data to parse\n self.clear_async_data()\n \n self.driver.start_sampling()\n\n # verify data is produced\n self.assert_data(\n DostadParserTelemeteredDataParticle,\n 'test_data_1_ss1.txt.result.yml',\n count=1,\n timeout=10\n )\n self.assert_data(\n DostadParserRecoveredDataParticle,\n 'test_data_1r_ss1.txt.result.yml',\n 
count=1,\n timeout=10\n )\n\n self.driver.stop_sampling()\n\n self.driver.start_sampling()\n \n self.assert_data(\n DostadParserTelemeteredMetadataDataParticle,\n 'test_data_1_ss2.txt.result.yml',\n count=1,\n timeout=10\n )\n self.assert_data(\n DostadParserRecoveredMetadataDataParticle,\n 'test_data_1r_ss2.txt.result.yml',\n count=1,\n timeout=10\n )", "def end_phase():\n pass", "def stopCond(self):\n\t\treturn False", "def _run_next_automation_sequence(self) -> None:\n sequence_iterator = iter(self.loaded_automation_sequence)\n\n while True:\n # sleep for a bit if we are paused to save resources\n if self.paused:\n time.sleep(0.1)\n\n else:\n sequence_finished = self._run_next_automation_action(sequence_iterator)\n\n if sequence_finished:\n break", "def stop(self):\n self.finished = True", "def run(self):\n #=======================================================================\n #\n # TODO: Replace this do-nothing code with some which does something.\n # Don't worry about looping (though you can), since this will be called\n # over and over again by the main appliance loop.\n #\n #=======================================================================\n self.logger.info('Nothing to do; sleeping for a while.')\n sleep(10)\n\n # Return something truthy to continue, anything else to exit.\n return True", "def should_continue():\n\n return LoopContinueEvent()", "async def on_step(self, iteration: int):\n raise NotImplementedError", "def execute(self):\n\t\tself.drivetrain.bad_auto_drive()\n\t\tself.drivetrain.stop_robot()", "def _gracefully_stop(self):\n pass" ]
[ "0.75177705", "0.70118654", "0.6976013", "0.69562066", "0.6870884", "0.6823549", "0.6785878", "0.67788917", "0.6727162", "0.6682127", "0.6655598", "0.6610465", "0.65924037", "0.65851337", "0.65685076", "0.65320814", "0.65093255", "0.64936584", "0.6490016", "0.64489716", "0.6447058", "0.6444502", "0.6436446", "0.64304173", "0.6426575", "0.642324", "0.64193714", "0.64193714", "0.64193714", "0.6385306", "0.637061", "0.6367249", "0.6351003", "0.63364685", "0.6335299", "0.63293856", "0.6323243", "0.630117", "0.6286622", "0.6247432", "0.6237265", "0.6214538", "0.62135327", "0.62091696", "0.6176258", "0.61739033", "0.61610454", "0.61610454", "0.61574155", "0.6154808", "0.6154808", "0.6153005", "0.61426914", "0.61216605", "0.61194605", "0.61172426", "0.6111807", "0.6092584", "0.6086174", "0.6075774", "0.60724545", "0.6072285", "0.6043091", "0.6042745", "0.6042745", "0.60316265", "0.6029762", "0.6025949", "0.60234314", "0.6007268", "0.60065705", "0.600638", "0.6000937", "0.59994185", "0.5992404", "0.59880984", "0.5983107", "0.59827346", "0.59797394", "0.5977402", "0.5970825", "0.5960854", "0.5956797", "0.59525037", "0.5952232", "0.59486127", "0.59446484", "0.59370077", "0.5932589", "0.5922488", "0.5919397", "0.59178734", "0.59083545", "0.5906663", "0.5906409", "0.59037006", "0.58988994", "0.58958", "0.5894909", "0.58891094" ]
0.6420328
26
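Here the tested form is the bare cd.step.stop(), and the record's assertions show it is step-local: the shared value set in a later step is still written. A sketch assuming two step files in one project, inferred from the test rather than from library docs:

    # step one
    import cauldron as cd
    cd.shared.test = 1
    cd.step.stop()      # ends only this step
    cd.shared.test = 2  # skipped

    # step two still runs after the stopped step
    import cauldron as cd
    cd.shared.other = 1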
Should stop the step early and silently
def test_stop_step_silent(self): contents = '\n'.join([ 'import cauldron as cd', 'cd.shared.test = 0', 'cd.step.breathe()', 'cd.shared.test = 1', 'cd.step.stop(silent=True)', 'cd.shared.test = 2' ]) support.create_project(self, 'homeritis') support.add_step(self, contents=contents) support.run_command('run') project = cd.project.get_internal_project() step = project.steps[0] self.assertEqual(project.shared.fetch('test'), 1) self.assertEqual(-1, step.dom.find('cd-StepStop'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_step(self) -> None:", "def test_step_stop_aborted(self, _step: PropertyMock):\n _step.return_value = None\n es = exposed.ExposedStep()\n es.stop()", "def stopTestRun(self):", "def _gracefully_stop(self):\n pass", "def run(self):\n sys.exit(-1)", "def pre_stop(self):", "def halt(*_, **kwargs):\n raise ExecutionFinished(\"Reached halt\")", "def stop() -> None:", "def _prepare_to_stop(self):\n pass", "def bail(self, msg):\n self.logger.error(\"Can't handle follower result: {}\".format(msg))\n self.call('ctrl', 'stop_full')\n sys.exit(1)", "def stop(self):\n self.halt = True", "def run_skip(self):\n pass", "def _step(self):\n pass", "def run_one_step(self):\n pass", "def test_case_01(self):\n if True:\n self.fail()", "def _step(self) -> None:", "def step(self):\n\n pass", "def need_stop(self, path):", "def stop(self) -> None:", "def stop(self) -> None:", "def aborting(self):\n \n pass", "def endOfTestcase(self):\n pass # nothing to do here. Hence pass statement is called.", "def _step(self, whence):\n pass", "def stop_check(self):\n pass", "def test_stop_step_no_halt(self):\n support.create_project(self, 'homer2')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.shared.other = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 'cd.step.stop()',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.other = 1'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertEqual(project.shared.fetch('other'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def post_stop(self):", "def stop():", "def stop():", "def stop():", "def stop():", "def shouldEndOnError(error):\n if Settings.stopExecutionOnError:\n System.stopExecution(error)\n Summary.printSummary()", "def _stop(self):\n return True", "def abort(self):\n print(\"abort\")", "def _run(self):\n logging.warning('-> perform EMPTY experiment...')", "def stopCond(self):\n\t\treturn False", "def TestOneStep(self):\n pass", "def test_terminate_run(self):\n pass", "def _stop(self):", "def _step(self):\n title()\n self.runCount = 1\n self.experiment.pause = False\n self._runExperiment()\n self.pause = True", "def stop(self):\n self._should_run = False", "def stop(self):\n\t\tpass", "def stop(self):\r\n pass", "def stop(self) -> None:\n ...", "def stop(self):", "def stop(self):", "def stop(self):\n return", "def Halt(self):\n self.run_halt = True", "def run_starter(self, expect_to_fail=False):", "def stopProducing(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def test_second_step_strict(self):\n with self.assertRaises(Exception):\n self.run_step('S02-errors.py', allow_failure=False)", "def stop(self):\r\n self.terminating = True", "def stop_fixture(self):\n pass", "def after_step():\n raise NotImplementedError", "def interrupt(self):\n return True", "def stop (self):\n pass", "def stop (self):\n pass", "def test_stop_step_and_halt(self):\n support.create_project(self, 'homer')\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 0',\n 'cd.step.breathe()',\n 'cd.shared.test = 1',\n 
'cd.step.stop(halt=True)',\n 'cd.shared.test = 2'\n ]))\n support.add_step(self, contents='\\n'.join([\n 'import cauldron as cd',\n 'cd.shared.test = 3'\n ]))\n\n support.run_command('run')\n project = cd.project.get_internal_project()\n step = project.steps[1]\n\n self.assertEqual(project.shared.fetch('test'), 1)\n self.assertNotEqual(-1, step.dom.find('cd-StepStop'))", "def perform_step(self) -> None:\n pass", "def after_step(context, step):\n if context.config.userdata.getbool(\"debug\") and step.status == \"failed\":\n spost_mortem(step.exc_traceback)", "def interrupted(self):\n self.end()", "def stop(self):\n self.finished = True", "def run_out_of_time(self):\n self.out_of_time = True", "def foreceStop(self):\n self.__success = False\n self.stop()", "def interrupted(self):\n pass", "def step(self, **kwargs):\n pass", "def exit(self):\n self.runtime.halted = True", "def step(self, action):", "def stopTest(self, test):\n self.complete_output()", "def complete_run():\n pass", "def stop(self) -> None:\n pass", "def experiment3():\n raise FAKE_ERROR", "def stop():\n raise StopIteration", "def stop(self):\n self._run = False", "def start(self):\n try:\n pass\n except:\n pass", "def abort(self):\n try:\n self.acqRunning = False\n except:\n print('Cannot abort properly')", "def stopclean(self):\n raise Exception(\"Not implemented\")", "def test_run_ended(self):", "def startOfTestcase(self):\n pass # nothing to do here. Hence pass statement is called.", "def ShutDown(self):\n self.stop = True", "def test_long_run_case_that_we_want_to_skip():\n time.sleep(30)\n assert 0", "def stop(self):\n # All done!\n super().stop()", "def stopTest(self, test):", "def finish(self):\n self.check_required()\n while self.options.next_breakpoint:\n self.options.shift_breakpoint()\n self.arguments = self.options.get_arguments()\n self.check_required()", "def quitting(self):\n pass", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def end_phase():\n pass", "def step(self, step=None):\n pass", "def do_exit(self, _):\n return True", "def abort(self):\n raise NotImplementedError" ]
[ "0.70605266", "0.69810665", "0.69688714", "0.69465715", "0.6923059", "0.69141006", "0.69115365", "0.6902989", "0.6805761", "0.67983", "0.67979056", "0.6749256", "0.6744562", "0.6743129", "0.67402655", "0.6723573", "0.67115927", "0.6710811", "0.67052597", "0.67052597", "0.67027473", "0.6694152", "0.6666316", "0.6654113", "0.66389775", "0.6628216", "0.6625316", "0.6625316", "0.6625316", "0.6625316", "0.65829587", "0.65788394", "0.65367055", "0.6525809", "0.6524626", "0.6520916", "0.6507875", "0.64813143", "0.64428663", "0.64337605", "0.64263415", "0.64240813", "0.64206654", "0.6416718", "0.6416718", "0.641617", "0.6413611", "0.64115065", "0.6403745", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6395362", "0.6393941", "0.63916004", "0.6380646", "0.6363989", "0.6360634", "0.6358543", "0.6358543", "0.6356402", "0.63414073", "0.63268507", "0.6320263", "0.6316399", "0.6315978", "0.6314413", "0.6313768", "0.6286653", "0.6284893", "0.62801635", "0.6271077", "0.6262231", "0.62503594", "0.6244468", "0.62293005", "0.6227969", "0.6225168", "0.62234974", "0.6211846", "0.62096643", "0.62032217", "0.6197731", "0.6194583", "0.6188974", "0.6173519", "0.61724824", "0.6170828", "0.616622", "0.61652124", "0.6148422", "0.6144971", "0.6139056" ]
0.65623134
32
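The silent variant differs only in display output: the test asserts that step.dom.find('cd-StepStop') returns -1, meaning no stop marker is rendered in the step's display. A minimal sketch under that reading:

    import cauldron as cd

    cd.shared.test = 1
    cd.step.stop(silent=True)  # ends the step without rendering a cd-StepStop element
    cd.shared.test = 2         # still skipped, exactly as with a non-silent stop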
Should get the internal project on the third attempt: one check before entering the retry-and-sleep loop, then two iterations through the loop before encountering a non-None value.
def test_get_internal_project( self, sleep: MagicMock, internal_project: PropertyMock ): project = exposed.ExposedProject() internal_project.side_effect = [None, None, None, 'test'] result = project.get_internal_project() self.assertEqual('test', result) self.assertEqual(2, sleep.call_count)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_retry_run(self):\n pass", "def _retry_occurred(self):", "def retry(times):\n return repeat_with_success_at_least(times, 1)", "def test_get_internal_project_fail(\n self,\n sleep: MagicMock,\n time_time: MagicMock,\n internal_project: PropertyMock\n ):\n project = exposed.ExposedProject()\n time_time.side_effect = range(20)\n internal_project.return_value = None\n result = project.get_internal_project()\n self.assertIsNone(result)\n self.assertEqual(10, sleep.call_count)", "async def _wait_retry(self) -> None:\n # Sleep 2^tries + 0…tries*3 seconds between retries\n self.retry_task = asyncio.create_task(\n asyncio.sleep(2 ** min(9, self.tries) + random.randint(0, self.tries * 3))\n )\n await self.retry_task\n self.retry_task = None", "def test_retry(self):\n self.response.raise_for_status.side_effect = \\\n [requests.HTTPError(), None]\n\n wsgi._retryable('get', 'http://some.thing')\n\n assert self.session.get.call_count == 2", "def retry(self):\n # XXX: check whether it is possible to distingish \n # between the error conditions and set meaningfull exitcode\n return False", "def always_retry(e):\n return True", "def retry(self):\n return False", "def decide_to_retry(error):\n return True", "def testGoodRetry(self):\n self.p = start_short_timeout_app_process()\n gateway = JavaGateway()\n connections = gateway._gateway_client.deque\n try:\n # Call #1\n gateway.jvm.System.currentTimeMillis()\n str_connection = str(connections[0])\n\n # Call #2 after, should not create new connections if the system is\n # not too slow :-)\n gateway.jvm.System.currentTimeMillis()\n self.assertEqual(1, len(connections))\n str_connection2 = str(connections[0])\n self.assertEqual(str_connection, str_connection2)\n\n sleep(0.5)\n gateway.jvm.System.currentTimeMillis()\n self.assertEqual(1, len(connections))\n str_connection3 = str(connections[0])\n # A new connection was automatically created.\n self.assertNotEqual(str_connection, str_connection3)\n except Py4JError:\n self.fail(\"Should retry automatically by default.\")\n finally:\n gateway.shutdown()\n self.p.join()", "def retryFileCopy(self):\n self.areCopiesValid = self.checkCopiedFiles()\n copyRetryCount = 5\n while(copyRetryCount > 1 and not self.areCopiesValid):\n print(\"Something failed in copy, retrying \" + str(copyRetryCount))\n self.firstTimeSetup()\n self.areCopiesValid = self.checkCopiedFiles()\n copyRetryCount -= 1", "def testRetryCount(self):\n change = ChangeState(self.config, \"changestate_t\")\n\n locationAction = self.daoFactory(classname=\"Locations.New\")\n locationAction.execute(\"site1\", pnn=\"T2_CH_CERN\")\n\n testWorkflow = Workflow(spec=self.specUrl, owner=\"Steve\",\n name=\"wf001\", task=self.taskName)\n testWorkflow.create()\n testFileset = Fileset(name=\"TestFileset\")\n testFileset.create()\n\n for i in range(4):\n newFile = File(lfn=\"File%s\" % i, locations=set([\"T2_CH_CERN\"]))\n newFile.create()\n testFileset.addFile(newFile)\n\n testFileset.commit()\n testSubscription = Subscription(fileset=testFileset,\n workflow=testWorkflow,\n split_algo=\"FileBased\")\n testSubscription.create()\n\n splitter = SplitterFactory()\n jobFactory = splitter(package=\"WMCore.WMBS\",\n subscription=testSubscription)\n jobGroup = jobFactory(files_per_job=1)[0]\n\n assert len(jobGroup.jobs) == 4, \\\n \"Error: Splitting should have created four jobs.\"\n\n testJobA = jobGroup.jobs[0]\n testJobA[\"user\"] = \"sfoulkes\"\n testJobA[\"group\"] = \"DMWM\"\n testJobA[\"taskType\"] = \"Processing\"\n testJobB = jobGroup.jobs[1]\n 
testJobB[\"user\"] = \"sfoulkes\"\n testJobB[\"group\"] = \"DMWM\"\n testJobB[\"taskType\"] = \"Processing\"\n testJobC = jobGroup.jobs[2]\n testJobC[\"user\"] = \"sfoulkes\"\n testJobC[\"group\"] = \"DMWM\"\n testJobC[\"taskType\"] = \"Processing\"\n testJobD = jobGroup.jobs[3]\n testJobD[\"user\"] = \"sfoulkes\"\n testJobD[\"group\"] = \"DMWM\"\n testJobD[\"taskType\"] = \"Processing\"\n\n change.persist([testJobA], \"created\", \"submitcooloff\")\n change.persist([testJobB], \"created\", \"jobcooloff\")\n change.persist([testJobC, testJobD], \"new\", \"none\")\n\n testJobA.load()\n testJobB.load()\n testJobC.load()\n testJobD.load()\n\n assert testJobA[\"retry_count\"] == 1, \\\n \"Error: Retry count is wrong.\"\n assert testJobB[\"retry_count\"] == 1, \\\n \"Error: Retry count is wrong.\"\n assert testJobC[\"retry_count\"] == 0, \\\n \"Error: Retry count is wrong.\"\n assert testJobD[\"retry_count\"] == 0, \\\n \"Error: Retry count is wrong.\"\n\n return", "def testBadRetry(self):\n self.p = start_example_app_process()\n gateway = JavaGateway(\n gateway_parameters=GatewayParameters(read_timeout=0.250))\n try:\n value = gateway.entry_point.getNewExample().sleepFirstTimeOnly(500)\n self.fail(\n \"Should never retry once the first command went through.\"\n \"number of calls made: {0}\".format(value))\n except Py4JError:\n self.assertTrue(True)\n finally:\n gateway.shutdown()\n self.p.join()", "def waitUntilSuccess():", "def _retry_refresh(wrapper, *a3, **k3):\n return func(wrapper, *a3, **k3)", "def retry(func, repeat=3, delay=tickTime * 2):\n\twhile repeat:\n\t\tresult = func()\n\n\t\tif result is None and delay and repeat != 1:\n\t\t\tsleep(delay)\n\n\t\telse:\n\t\t\treturn result\n\n\t\trepeat -= 1", "def retry(self):\n return self._retry", "def test_retry_build_on_compute_error(self):\n # Now that the bug is fixed, we should assert that the server goes to\n # ACTIVE status and is on the second host after the retry operation.\n server = dict(\n name='retry-test',\n imageRef=self.image_id,\n flavorRef=self.flavor_id)\n server = self.admin_api.post_server({'server': server})\n self.addCleanup(self.admin_api.delete_server, server['id'])\n server = self._wait_for_instance_status(server['id'], 'ACTIVE')\n\n # Assert that the host is not the failed host.\n self.assertNotEqual(self.failed_host,\n server['OS-EXT-SRV-ATTR:host'])\n\n # Assert that we retried.\n self.assertEqual(2, self.attempts)", "def test_retry_service(self):\n network = NetworkAPI(\"main\")\n network.list_of_apis = collections.deque([MockApi])\n assert \"\" == network.get_transaction(TEST_TX)", "def retry_request(\n self,\n tapi_exception,\n error_message,\n repeat_number,\n response,\n request_kwargs,\n api_params,\n **kwargs\n ):\n return False", "def test_reconnect_loop(self, auth_mock, connected_mock, init_update_mock):\n\n # an unsuccesful api call should not reconnect, but do not resolve the task so reconnect gets tried again\n auth_mock.return_value = False\n self.create_org(status=DISCONNECTED)\n response = self.app.post('/adapter/test/reconnect', headers={'X-AppEngine-TaskExecutionCount': 10})\n self.assertEqual(response.status_code, 423)\n connected_mock.assert_not_called()\n init_update_mock.assert_not_called()\n\n # stop trying after a while (resolve the task)\n response = self.app.post('/adapter/test/reconnect', headers={'X-AppEngine-TaskExecutionCount': 43})\n self.assertEqual(response.status_code, 204)\n connected_mock.assert_not_called()\n init_update_mock.assert_not_called()\n\n # if api call works reconnect 
the org\n auth_mock.return_value = True\n response = self.app.post('/adapter/test/reconnect', headers={'X-AppEngine-TaskExecutionCount': 10})\n self.assertEqual(response.status_code, 204)\n connected_mock.assert_called_once()\n init_update_mock.assert_called_once()\n\n # an already connected org will get ignored\n connected_mock.reset_mock()\n init_update_mock.reset_mock()\n self.create_org(status=CONNECTED)\n response = self.app.post('/adapter/test/reconnect', headers={'X-AppEngine-TaskExecutionCount': 10})\n self.assertEqual(response.status_code, 204)\n connected_mock.assert_not_called()\n init_update_mock.assert_not_called()", "def _retry_bootstrap_candidates(self):\n if __debug__: dprint(\"unable to resolve all bootstrap addresses\", level=\"warning\")\n for counter in count(1):\n yield 1.0 if counter < 30 else 30.0\n if __debug__: dprint(\"attempt #\", counter, level=\"warning\")\n candidates = get_bootstrap_candidates(self)\n for candidate in candidates:\n if candidate is None:\n break\n else:\n if __debug__: dprint(\"resolved all bootstrap addresses\")\n self._bootstrap_candidates = dict((candidate.sock_addr, candidate) for candidate in candidates if candidate)\n break", "def test_single_scan_while_pno(self):\n self.log.info(\"Check connection through PNO for reference network\")\n current_network = self.dut.droid.wifiGetConnectionInfo()\n self.log.info(\"Current network: {}\".format(current_network))\n asserts.assert_true('network_id' in current_network, NETWORK_ID_ERROR)\n asserts.assert_true(current_network['network_id'] >= 0, NETWORK_ERROR)\n self.log.info(\"Kicking PNO for reference network\")\n self.attenuators[ATTENUATOR].set_atten(90)\n time.sleep(10) #wait for PNO to be kicked\n self.log.info(\"Starting single scan while PNO\")\n self.wifi_scanner_single_scan(self.default_scan_setting)\n self.attenuators[ATTENUATOR].set_atten(0)\n self.log.info(\"Check connection through PNO for reference network\")\n time.sleep(30) #wait for connection through PNO\n current_network = self.dut.droid.wifiGetConnectionInfo()\n self.log.info(\"Current network: {}\".format(current_network))\n asserts.assert_true('network_id' in current_network, NETWORK_ID_ERROR)\n asserts.assert_true(current_network['network_id'] >= 0, NETWORK_ERROR)\n time.sleep(10) #wait for IP to be assigned\n asserts.assert_true(\n wutils.validate_connection(self.dut, self.ping_addr),\n \"Error, No internet connection for current network\")\n wutils.wifi_forget_network(self.dut,\n self.reference_networks[0][\"2g\"][\"SSID\"])", "def _ShouldRetry(self, type_, value_, traceback):\n return True", "def retry(nattempts, exception=None):\n \n def tryIt(func):\n def wrapper(*args, **kwargs):\n attempts = 0\n while attempts < nattempts - 1:\n try:\n return func(*args, **kwargs)\n except (exception if exception is not None else Exception):\n attempts += 1\n return func(*args, **kwargs)\n return wrapper\n return tryIt", "def test_successReset(self):\n for i in range(3):\n self.circuit_breaker.failure()\n self.circuit_breaker.success()\n available0 = self.circuit_breaker.available()\n self.circuit_breaker.failure()\n available1 = self.circuit_breaker.available()\n self.circuit_breaker.failure()\n available2 = self.circuit_breaker.available()\n self.circuit_breaker.failure()\n available3 = self.circuit_breaker.available()\n available4 = self.circuit_breaker.available()\n self.assertEqual((available0, available1, available2, available3, available4),\n (True, True, True, False, False))", "def test_retry_failed_jobs(sleep, client, 
job_retry_on_query):\n\n retry_notfound = google.api_core.retry.Retry(\n predicate=google.api_core.retry.if_exception_type(\n google.api_core.exceptions.NotFound\n )\n )\n retry_badrequest = google.api_core.retry.Retry(\n predicate=google.api_core.retry.if_exception_type(\n google.api_core.exceptions.BadRequest\n )\n )\n\n if job_retry_on_query is None:\n reason = \"rateLimitExceeded\"\n else:\n reason = \"notFound\"\n\n err = dict(reason=reason)\n responses = [\n dict(status=dict(state=\"DONE\", errors=[err], errorResult=err)),\n dict(status=dict(state=\"DONE\", errors=[err], errorResult=err)),\n dict(status=dict(state=\"DONE\", errors=[err], errorResult=err)),\n dict(status=dict(state=\"DONE\")),\n dict(rows=[{\"f\": [{\"v\": \"1\"}]}], totalRows=\"1\"),\n ]\n\n def api_request(method, path, query_params=None, data=None, **kw):\n response = responses.pop(0)\n if data:\n response[\"jobReference\"] = data[\"jobReference\"]\n else:\n response[\"jobReference\"] = dict(\n jobId=path.split(\"/\")[-1], projectId=\"PROJECT\"\n )\n return response\n\n conn = client._connection = make_connection()\n conn.api_request.side_effect = api_request\n\n if job_retry_on_query == \"Query\":\n job_retry = dict(job_retry=retry_notfound)\n elif job_retry_on_query == \"Both\":\n # This will be overridden in `result`\n job_retry = dict(job_retry=retry_badrequest)\n else:\n job_retry = {}\n job = client.query(\"select 1\", **job_retry)\n\n orig_job_id = job.job_id\n job_retry = (\n dict(job_retry=retry_notfound)\n if job_retry_on_query in (\"Result\", \"Both\")\n else {}\n )\n result = job.result(**job_retry)\n assert result.total_rows == 1\n assert not responses # We made all the calls we expected to.\n\n # The job adjusts it's job id based on the id of the last attempt.\n assert job.job_id != orig_job_id\n assert job.job_id == conn.mock_calls[3][2][\"data\"][\"jobReference\"][\"jobId\"]\n\n # We had to sleep three times\n assert len(sleep.mock_calls) == 3\n\n # Sleeps are random, however they're more than 0\n assert min(c[1][0] for c in sleep.mock_calls) > 0\n\n # They're at most 2 * (multiplier**(number of sleeps - 1)) * initial\n # The default multiplier is 2\n assert max(c[1][0] for c in sleep.mock_calls) <= 8\n\n # We can ask for the result again:\n responses = [\n dict(rows=[{\"f\": [{\"v\": \"1\"}]}], totalRows=\"1\"),\n ]\n orig_job_id = job.job_id\n result = job.result()\n assert result.total_rows == 1\n assert not responses # We made all the calls we expected to.\n\n # We wouldn't (and didn't) fail, because we're dealing with a successful job.\n # So the job id hasn't changed.\n assert job.job_id == orig_job_id", "def step_impl(context, instance_number):\n num_try = 60\n interval = 10\n success = False\n\n for i in range(num_try):\n time.sleep(interval)\n context.service_instances = context.service.status()['replicaStatus']\n if len(context.service_instances) == int(instance_number):\n success = True\n break\n context.dl.logger.debug(\"Step is running for {:.2f}[s] and now Going to sleep {:.2f}[s]\".format((i + 1) * interval,\n interval))\n\n assert success, \"TEST FAILED: Expected {}, Got {}\".format(instance_number, len(context.service_instances))", "def _retry(self, result, method, url, params_dict, **kwargs):\n return result", "def test_returns_challenging_projects_if_difficulty_set_to_changelling(self):\n # Arrange\n self.test_project_2.private = False\n # Set difficulty of test_project_2 to hard.\n self.test_project_2.difficulty = ProjectDifficulty.CHALLENGING.value\n self.test_project_2.save()\n 
# Act\n response = self.client.get(\n self.url,\n headers={\"Authorization\": self.user_session_token},\n query_string={\"difficulty\": \"CHALLENGING\"},\n )\n # Assert\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(response.json[\"results\"]), 1)\n self.assertEqual(\n response.json[\"results\"][0][\"projectId\"], self.test_project_2.id\n )", "def retry_branch_on(branch_id, time_min):\n base_url = BRANCHES_SETTINGS[branch_id]['base_url']\n pump_enabled = BRANCHES_SETTINGS[branch_id]['pump_enabled']\n # If branch is not deactivated. It will be stoped by internal process in 2 minutes\n time_min = time_min + 2\n\n try:\n for attempt in range(2):\n try:\n if base_url is None:\n response_off = garden_controller.branch_on(branch_id=branch_id, pump_enable=pump_enabled, branch_alert=time_min)\n logging.info('response {0}'.format(response_off))\n\n if (response_off[branch_id]['state'] != 1):\n logging.error('Branch {0} cant be turned on. response {1}'.format(branch_id, str(response_off)))\n time.sleep(2)\n continue\n else:\n return response_off\n else:\n response_off = requests.get(url=base_url, params={'branch_id': branch_id, 'branch_alert': time_min}, timeout=(5, 5))\n logging.info('response {0}'.format(str(response_off.text)))\n response_off = json.loads(response_off.text)\n\n if (response_off[branch_id]['state'] != 0):\n logging.error('Branch {0} cant be turned on. response {1}'.format(branch_id, response_off))\n time.sleep(2)\n continue\n else:\n return response_off\n\n except Exception as e:\n logging.error(e)\n logging.error(\"Can't turn on {0} branch. Exception occured. {1} try out of 2\".format(branch_id, attempt))\n time.sleep(2)\n continue\n\n raise Exception(\"Can't turn on {0} branch. Retries limit reached\".format(branch_id))\n except Exception as e:\n logging.error(e)\n logging.error(\"Can't turn on branch id={0}. Exception occured\".format(branch_id))\n raise Exception(\"Can't turn on {0} branch\".format(branch_id))", "def task3(self):\n\n pass", "def test_request_retries(self, nosleep, method):\n # Dummies for K8s API URL and `requests` session.\n url = 'http://localhost:12345/'\n client = k8s.requests.Session()\n\n # Test function must not return a response but indicate an error.\n ret = k8s.request(client, method, url, None, None)\n assert ret == ({}, True)\n\n # Windows is different. No idea why but it refuses to connect more than\n # three times. Mac and Linux behave as expected.\n if sys.platform.startswith(\"win\"):\n assert nosleep.call_count == 3\n else:\n assert nosleep.call_count == 20", "def test_wait_for_db(self, ts):\n \"\"\" Here we are checking that the wait_for_db command will try the database 5 times\n and on the sixth time it'll be successful and continue \n \"\"\"\n with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:\n gi.side_effect=[OperationalError] * 5 + [True]\n call_command('wait_for_db')\n self.assertEqual(gi.call_count, 6)", "def retry(self, times):\n return Retry((requests.ConnectionError, requests.Timeout), times)", "async def _retry_get(url: str, retries: int, **kwargs):\r\n retries -= 1\r\n if retries >= 0:\r\n logger.warning(\r\n f\"Retrying request to {url}. Retries remaining: {retries}\")\r\n return await asyncio.create_task(\r\n self.get(url, retries, **kwargs))\r\n logger.error(\r\n f\"Max retries exceeded: {url}. 
URL can not be navigated.\")", "def attempt_to_acquire_leader(self, permanent=False):", "def lagopus_wait_connect_db(retry, wait):\n cnx = lagopus_connect_db()\n while not cnx and retry != 0:\n if retry > 0:\n retry -= 1\n print(\"Failed to conenct to MySQL, retrying in {}s...\".format(wait))\n time.sleep(wait)\n cnx = lagopus_connect_db()\n\n return cnx", "def get_retry_delay(self, last_delay):\n return last_delay * 2", "def experiment3():\n raise FAKE_ERROR", "def _fetch_url(url, attempt=1, session=requests.Session()):\n try:\n # Without this, citybureau.org throttles the first request.\n headers = {'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.101 Safari/537.36'}\n r = session.get(url, headers=headers)\n r.raise_for_status()\n return r\n except requests.exceptions.RequestException as e:\n if attempt >= 5:\n return None\n else:\n print(e)\n wait = 2 ** attempt\n print('waiting for {0} seconds'.format(wait))\n time.sleep(wait)\n return _fetch_url(url, attempt + 1)", "def test_loopFailure_recovery(self):\n dbpool, _ignore_qpool, clock, _ignore_performerChosen = self._setupPools()\n fakeNow = datetime.datetime(2012, 12, 12, 12, 12, 12)\n\n oldAssign = JobItem.assign\n @inlineCallbacks\n def _assign(self, when, overdue):\n work = yield self.workItem()\n if work.a == -2:\n raise ValueError(\"oops\")\n yield oldAssign(self, when, overdue)\n\n self.patch(JobItem, \"assign\", _assign)\n\n # Let's create a couple of work items directly, not via the enqueue\n # method, so that they exist but nobody will try to immediately execute\n # them.\n\n @transactionally(dbpool.pool.connection)\n @inlineCallbacks\n def setup(txn):\n # Failing\n yield DummyWorkItem.makeJob(\n txn, a=-2, b=1, notBefore=fakeNow - datetime.timedelta(20 * 60)\n )\n # OK\n yield DummyWorkItem.makeJob(\n txn, a=1, b=0, notBefore=fakeNow - datetime.timedelta(20 * 60, 5)\n )\n yield setup\n clock.advance(20 - 12)\n\n @transactionally(dbpool.pool.connection)\n def check(txn):\n return JobItem.all(txn)\n\n jobs = yield check\n self.assertEqual(len(jobs), 1)\n self.assertEqual(jobs[0].assigned, None)\n self.assertEqual(jobs[0].isAssigned, 0)\n self.assertEqual(jobs[0].failed, 1)\n self.assertGreater(jobs[0].notBefore, datetime.datetime.utcnow() + datetime.timedelta(seconds=30))", "def try_get(url, ntries=3, delay=10, verbose = False):\r\n attempts = ntries\r\n\r\n while attempts>0:\r\n attempts_try = attempts\r\n if verbose:\r\n print(\"%d attemps left, trying %s\" % (attempts, url))\r\n try:\r\n result = requests.get(url)\r\n code = \"Ok\"\r\n attempts = 0\r\n result.raise_for_status()\r\n except requests.exceptions.HTTPError as errh:\r\n attempts = 0\r\n code = \"Error\"\r\n result = \"Http Error: %s\" % errh\r\n if verbose:\r\n print (result)\r\n except requests.exceptions.ConnectionError as errc:\r\n attempts = attempts_try - 1\r\n code = \"Error\"\r\n result = \"Error Connecting: %s\" % errc\r\n if verbose:\r\n print (result)\r\n sleep(delay)\r\n except requests.exceptions.Timeout as errt:\r\n attempts = attempts_try - 1\r\n code = \"Error\"\r\n result = \"Timeout Error: %s\" % errt\r\n if verbose:\r\n print (result)\r\n sleep(delay)\r\n except requests.exceptions.RequestException as err:\r\n attempts = 0\r\n code = \"Error\"\r\n if verbose:\r\n print (result)\r\n result = \"Oops: Something Else %s\" % err\r\n if verbose:\r\n print(\" \",code, result if code==\"Error\" else \"\")\r\n return {'code': code, 'result': result}", "def wait_task(self, task_id, 
time_before_retry = 100):\n while True:\n res = AlgoliaUtils_request(self.client.headers, self.read_hosts, \"GET\", \"/1/indexes/%s/task/%d/\" % (self.url_index_name, task_id), self.client.timeout)\n if (res[\"status\"] == \"published\"):\n return res\n time.sleep(time_before_retry / 1000.)", "def on_success(self, queue, result):\n updated_issues_count, delay = result\n self.clone(delayed_for=delay + randint(0, 10))", "def _do_fail_retry(self, event):\n if self._retries > 0:\n self._retries -= 1\n self._state_machine.retry()\n else:\n self._state_machine.abort(result=event.result)", "def _do_fail_retry(self, event):\n if self._retries > 0:\n self._retries -= 1\n self._state_machine.retry()\n else:\n self._state_machine.abort(result=event.result)", "def retryManager(self):\n if self.retry_counter >= self.retry_forget_time:\n self.retry_counter = 0\n if self.retry_count > 0:\n self.retry_count = 0\n self.db = shelve.open(os.path.join(self.xlocal, 'Launch Manager Utils\\\\launch.data'))\n self.db['retry_count'] = self.retry_count\n self.db.close()\n self.logQ.put('Retry count of {0} has been reset to {1} after Retry forget time of {2}'.format(\n self.retry_limit,\n self.retry_count,\n self.retry_forget_time))\n self.retry_counter += 1", "def sleep_until_next_revision_ready(self, revision_list):\n api = self.api\n\n revision_mapping = {}\n gs_jobs = []\n buildbot_jobs = []\n\n for revision in revision_list:\n url = revision.get_next_url()\n buildbot_job = revision.get_buildbot_locator()\n if url:\n gs_jobs.append({'type': 'gs', 'location': url})\n revision_mapping[url] = revision\n if buildbot_job:\n buildbot_jobs.append(buildbot_job)\n revision_mapping[buildbot_job['job_name']] = revision\n\n jobs_config = {'jobs': buildbot_jobs + gs_jobs}\n\n script = api.resource('wait_for_any.py')\n args_list = [api.m.gsutil.get_gsutil_path()] if gs_jobs else []\n\n try:\n step_name = 'Waiting for revision ' + revision_list[0].revision_string\n if len(revision_list) > 1:\n step_name += ' and %d other revision(s).' 
% (len(revision_list) - 1)\n api.m.python(\n str(step_name),\n script,\n args_list,\n stdout=api.m.json.output(),\n stdin=api.m.json.input(jobs_config),\n ok_ret={0, 1})\n except api.m.step.StepFailure as sf: # pragma: no cover\n if sf.retcode == 2: # 6 days and no builds finished.\n for revision in revision_list:\n revision.status = revision_state.RevisionState.FAILED\n for revision in revision_list:\n if revision.status == revision.TESTING:\n self.surface_result('TEST_TIMEOUT')\n if revision.status == revision.BUILDING:\n self.surface_result('BUILD_TIMEOUT')\n return None # All builds are failed, no point in returning one.\n else: # Something else went wrong.\n raise\n\n step_results = api.m.step.active_result.stdout\n build_failed = api.m.step.active_result.retcode\n\n if build_failed:\n # Explicitly make the step red.\n api.m.step.active_result.presentation.status = api.m.step.FAILURE\n\n if not step_results:\n # For most recipe_simulation_test cases.\n return None\n\n failed_jobs = step_results.get('failed', [])\n completed_jobs = step_results.get('completed', [])\n last_failed_revision = None\n assert failed_jobs or completed_jobs\n\n # Marked all failed builds as failed\n for job in failed_jobs:\n last_failed_revision = revision_mapping[str(job.get(\n 'location', job.get('job_name')))]\n if 'job_url' in job:\n url = job['job_url']\n api.m.step.active_result.presentation.links['Failed build'] = url\n last_failed_revision.status = revision_state.RevisionState.FAILED\n\n # Return a completed job if available.\n for job in completed_jobs:\n if 'job_url' in job: # pragma: no cover\n url = job['job_url']\n api.m.step.active_result.presentation.links['Completed build'] = url\n return revision_mapping[str(job.get(\n 'location', job.get('job_name')))]\n\n # Or return any of the failed revisions.\n return last_failed_revision", "def scan(self):\n\n from urllib3.exceptions import MaxRetryError, NewConnectionError\n #While we haven't successfully run\n while True:\n try:\n #Try to run\n self.run()\n break\n except:\n #Otherwise restart\n sys.exit()\n #self.restart()\n time.sleep(10)", "def work3():\n logging.info(\"work3 doing a job\")\n if random.randint(1, 5) == 1:\n logging.error(\"Error in work3: bad input\")", "def set_retry_timeout(self, retry_timeout):", "def _check_connection(self):\n for _ in range(3):\n try:\n r = get(f\"http://{self.ip}/student/{self.user}\")\n if r.ok:\n break \n except OSError as e:\n print(f\"Connection error:\\n{e}\")\n sleep(2)\n else:\n raise ConnectionError(f\"Can not connect to server with params ip: {self.ip}, user: {self.user}\")", "def test_retry_failed(self):\n self.response.raise_for_status.side_effect = requests.HTTPError()\n\n with pytest.raises(requests.HTTPError):\n wsgi._retryable('get', 'http://some.thing')\n\n assert self.session.get.call_count == wsgi.MAX_RETRIES\n assert wsgi.MAX_RETRIES > 1", "def test_solicitation_max_retry(self):\n self.autoconflayer._solicitation_max_retry = 6\n waittime = self.autoconflayer._solicitation_timeout * 10\n self.autoconflayer.start_process()\n interest = Interest(Name('/foo/bar'))\n self.queue_from_higher.put([None, interest])\n\n deadline = datetime.utcnow() + timedelta(seconds=waittime)\n tolower = []\n while datetime.utcnow() < deadline:\n try:\n data = self.queue_to_lower.get(timeout=waittime/10)\n tolower.append(data)\n except queue.Empty:\n pass\n try:\n tohigher = self.queue_to_higher.get(timeout=waittime/10)\n except queue.Empty:\n self.fail()\n bcfid = 
self.faceidtable.get_or_create_faceid(AddressInfo(('127.255.255.255', 4242), 0))\n self.assertIsNotNone(bcfid)\n solictiation = Interest(Name('/autoconfig/forwarders'))\n solictiation_count = len([1 for data in tolower if data == [bcfid, solictiation]])\n self.assertEqual(6, solictiation_count)\n self.assertIsNone(tohigher[0])\n self.assertIsInstance(tohigher[1], Nack)\n self.assertEqual('/foo/bar', tohigher[1].name.components_to_string())\n self.assertEqual(NackReason.NO_ROUTE, tohigher[1].reason)", "def trial(self):\n pass", "def test_state_after_failure(self):\n pass", "def _compute_next_task_for_user(user, project, language_pair):\n # Check if project is valid for the given user.\n if not project in user.project_set.all():\n LOGGER.debug('User {0} does not work on project {1}.'.format(\n user, project\n ))\n return None\n \n # Check if language_pair is valid for the given user.\n if not user.groups.filter(name=language_pair):\n LOGGER.debug('User {0} does not know language pair {1}.'.format(\n user, language_pair))\n return None\n\n # Check if there exists a current HIT for the given user.\n current_hitmap = UserHITMapping.objects.filter(user=user,\n project=project, hit__language_pair=language_pair)\n\n # If there is no current HIT to continue with, find a random HIT for the\n # given user. We keep generating a random block_id in [1, 1000] until we\n # find a matching HIT which the current user has not yet completed.\n if not current_hitmap:\n LOGGER.debug('No current HIT for user {0}, fetching HIT.'.format(\n user))\n \n # Compatible HIT instances need to match the given language pair!\n # Furthermore, they need to be active and not reserved for MTurk.\n hits = HIT.objects.filter(active=True, mturk_only=False,\n completed=False, project=project, language_pair=language_pair)\n \n LOGGER.debug(\"HITs = {0}\".format(hits))\n \n # Compute list of compatible block ids and randomise its order.\n #\n # cfedermann: for WMT14 Matt did not provide block ids anymore.\n # This meant that our shuffled list of block ids only contained\n # [-1, ..., -1] entries; using these to filter and check for\n # respective HIT status is a quadratic increase of redundant work\n # which will take prohibitively long when there is no next HIT.\n #\n # Converting to unique HIT ids will speed up things drastically.\n hit_ids = list(set(hits.values_list('hit_id', flat=True)))\n shuffle(hit_ids)\n LOGGER.debug(\"HIT IDs = {0}\".format(hit_ids))\n \n # Find the next HIT for the current user.\n random_hit = None\n for hit_id in hit_ids:\n for hit in hits.filter(hit_id=hit_id):\n hit_users = list(hit.users.all())\n \n # Check if this HIT is mapped to users. This code prevents\n # that more than MAX_USERS_PER_HIT users complete a HIT.\n for hitmap in UserHITMapping.objects.filter(hit=hit):\n if not hitmap.user in hit_users:\n hit_users.append(hitmap.user)\n \n if not user in hit_users:\n if len(hit_users) < MAX_USERS_PER_HIT:\n random_hit = hit\n break\n \n if random_hit:\n break\n \n # If we still haven't found a next HIT, there simply is none...\n if not random_hit:\n # TODO: We should now investigate if there is any HIT assigned\n # to a user but has not been finished in a certain time span.\n # Such a HIT can be freed and assigned to the current user. \n return None\n \n # Update User/HIT mappings s.t. 
the system knows about the next HIT.\n current_hitmap = UserHITMapping.objects.create(user=user,\n project=project, hit=random_hit)\n \n # Otherwise, select first match from QuerySet.\n else:\n current_hitmap = current_hitmap[0]\n \n # Sanity check preventing stale User/HIT mappings to screw up things.\n #\n # Before we checked if `len(hit_users) >= 3`.\n hit_users = list(current_hitmap.hit.users.all())\n if user in hit_users or len(hit_users) >= 1 \\\n or not current_hitmap.hit.active:\n LOGGER.debug('Detected stale User/HIT mapping {0}->{1}'.format(\n user, current_hitmap.hit))\n current_hitmap.delete()\n return _compute_next_task_for_user(user, project, language_pair)\n \n LOGGER.debug('User {0} currently working on HIT {1}'.format(user,\n current_hitmap.hit))\n \n return current_hitmap.hit", "def test_get_response_with_retry__error_status(self, mock_get_thread_session):\n mock_requests_error_response = mock.Mock(status_code=500)\n mock_requests_response = mock.Mock(status_code=206)\n mock_requests_session = mock.create_autospec(requests.Session)\n mock_requests_session.get.side_effect = [\n mock_requests_error_response,\n mock_requests_response,\n ]\n mock_get_thread_session.return_value = mock_requests_session\n\n mock_presigned_url_provider = mock.create_autospec(\n download_threads.PresignedUrlProvider\n )\n presigned_url_info = download_threads.PresignedUrlInfo(\n \"foo.txt\", \"synapse.org/foo.txt\", datetime.datetime.utcnow()\n )\n\n mock_presigned_url_provider.get_info.return_value = presigned_url_info\n start = 5\n end = 42\n\n mock_syn = mock.Mock(spec=Synapse)\n mock_executor = mock.Mock(spec=concurrent.futures.Executor)\n downloader = _MultithreadedDownloader(mock_syn, mock_executor, 5)\n assert (start, mock_requests_response) == downloader._get_response_with_retry(\n mock_presigned_url_provider, start, end\n )\n\n expected_get_call_args_list = [\n mock.call(presigned_url_info.url, headers={\"Range\": \"bytes=5-42\"})\n ] * 2\n assert mock_requests_session.get.call_args_list == expected_get_call_args_list", "def retry_after(self):\n return self._retry_after", "def reconnect(self):\n self.test_cmd()\n if not self.check_network: \n self.reset()\n attempt=0\n while not self.check_network and attempt<self.retries:\n self.full_reset()\n attempt+=1", "def run(self):\n self.cycle = 0\n self.error_code = None\n\tsubNumber = 0\n\ttimestarted = time.time() \n\tself.failCount = 0\n while True:\n\t self.cycle += 1\n\t self.seen_per_cycle = 0\n\t self.step = 0\n\n #if not self.running:\n # self.restart()\n # return\n try:\n\t\tif (config.MAX_CYCLES_TILL_QUIT+1 <= self.cycle-self.failCount):\n\t \t if self.error_code == None:\n\t\t\tself.error_code = 'COMPLETE'\n\t\t else:\n\t\t\tself.error_code = self.error_code + \"-C\"\n\t\t return\n\n\t\tcurrentTime = time.time()\n\t\tif (config.SLEEP == 1 and currentTime - timestarted > config.MAX_TIME_AWAKE):\n\t\t\tsubNumber = subNumber + 1\n\t\t\ttimestarted = currentTime\n\t\t\tif (subNumber > utils.getSubMultiplier()):\n\t\t\t\tsubNumber = 0\n\t\telse:\n \tif (self.cycle > 1):\n \t\ttime.sleep(random.randint(30, 60))\n\t\t\telse:\n\t\t\t time.sleep(1)\n\n\t\tif self.failCount >= 3:\n\t \t if self.error_code == None:\n\t\t\tself.error_code = 'STOPPED'\n\t\t else:\n\t\t\tself.error_code = self.error_code + \"-D\"\n\t\t return\n\n self.error_code = None\n\n \tsuccess = self.login(subNumber, self.numActiveAtOnce)\n\n \tif not success:\n\t\t self.failCount = self.failCount + 1\n\t\t time.sleep(3)\n\t\t continue\n\n\t\tlogger.info(\"Logged into: \" + 
self.username)\t\t\n\n\t\tself.main()\n\n except BannedAccount:\n \tlogger.info(self.username + \" appears to be banned\")\n\t self.error_code = 'BANNED'\n# self.restart(30, 90)\n #return\n\t\tself.failCount = self.failCount + 1\n\t\tcontinue\n\t # this only occurs if it is non fixable, fixable ones are handled where it was running\n except CaptchaAccount:\n\t logger.info(\"Stopping worker as there appear to be no more accounts\")\n\t\t\tself.error_code = self.error_code + \"-X\"\n\t\t\treturn\n except FunkyAccount:\n\t logger.info(\"Stopping worker as this account is being funky\")\n\t\t\tif self.error_code is None:\n\t\t\t\tself.error_code = \"FUNKY\"\n\t\t\telse:\n\t\t\t\tself.error_code = self.error_code + \"-F\"\n\t\t\treturn\n except Exception:\n logger.exception('A wild exception appeared!')\n self.error_code = 'EXCEPTION'\n #self.restart()\n #return\n\t\tself.failCount = self.failCount + 1\n\t\tcontinue\n #if not self.running:\n # self.restart()\n # return\n\t self.failCount = 0\n #if self.cycle <= config.CYCLES_PER_WORKER:\n # logger.info('Going to sleep for a bit')\n # self.error_code = 'SLEEP'\n #self.running = False\n # logger.info('AWAKEN MY MASTERS')\n #self.running = True\n #self.error_code = None\n #self.error_code = 'RESTART'\n #self.restart()", "def test_timeoutReset(self):\n for i in range(3):\n self.circuit_breaker.failure()\n self.time.advance(29.0)\n available29sec = self.circuit_breaker.available()\n self.time.advance(1.1)\n available30sec = self.circuit_breaker.available()\n self.assertEqual((available29sec, available30sec),\n (False, True))", "def test_multipleConcurrentFailure(self):\n resolver = client.Resolver(servers=[('example.com', 53)])\n resolver.protocol = StubDNSDatagramProtocol()\n queries = resolver.protocol.queries\n\n query = dns.Query('foo.example.com', dns.A)\n firstResult = resolver.query(query)\n secondResult = resolver.query(query)\n\n class ExpectedException(Exception):\n pass\n\n queries.pop()[-1].errback(failure.Failure(ExpectedException()))\n\n return defer.gatherResults([\n self.assertFailure(firstResult, ExpectedException),\n self.assertFailure(secondResult, ExpectedException)])", "def test_wait(self, mocker):\n\n tid = 289466\n site = \"mysite\"\n first_response = self.generate_task_dictionary(\n tid, state=\"waiting\", completed=False\n )\n\n responses = [\n {\"json\": first_response},\n {\"json\": self.generate_task_dictionary(tid)},\n ]\n url = (\n \"https://cloudapi.acquia.com/v1/\"\n \"sites/prod:{site}/tasks/{tid}.json\".format(tid=tid, site=site)\n )\n\n mocker.register_uri(\"GET\", url, responses)\n\n task = self.client.site(site).task(tid).wait()\n self.assertEqual(task[\"id\"], tid)\n self.assertEqual(task[\"state\"], \"done\")", "def test_call_second_time(self, query_repo_url, get_credentials, valid_revision, get):\n # Making sure the cache is filled so we don't depend on the order of the tests.\n query_jobs.JOBS_CACHE[(\"try\", \"146071751b1e\")] = json.loads(JOBS_SCHEDULE)\n self.assertEquals(\n self.query_api._get_all_jobs(\"try\", \"146071751b1e\"),\n json.loads(JOBS_SCHEDULE))\n # _get_all_jobs should return its value directly from\n # cache without calling get\n assert get.call_count == 0", "def retry_connect(redis_cfg, tries=300, base_delay=4.):\n for i in range(tries):\n try:\n r = redis.StrictRedis(**redis_cfg)\n r.ping()\n return r\n except redis.ConnectionError as e:\n if i == tries - 1:\n raise\n else:\n delay = base_delay * (1 + (os.getpid() % 10) / 9)\n print(f'WARNING: could not connect to {redis_cfg}. 
Retrying after {delay} sec ({i+2}/{tries}). Error {e}')\n time.sleep(delay)", "def __init__(self, tries , exceptions=None, delay=0.01):\n self.tries = tries\n if exceptions is None:\n exceptions = Retry.default_exceptions\n self.exceptions = exceptions\n self.delay = delay", "def test_returns_easy_projects_if_difficulty_set_to_easy(self):\n # Arrange\n self.test_project_2.private = False\n # Set difficulty of test_project_2 to easy.\n self.test_project_2.difficulty = ProjectDifficulty.EASY.value\n self.test_project_2.save()\n # Act\n response = self.client.get(\n self.url,\n headers={\"Authorization\": self.user_session_token},\n query_string={\"difficulty\": \"EASY\"},\n )\n # Assert\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(response.json[\"results\"]), 1)\n self.assertEqual(\n response.json[\"results\"][0][\"projectId\"], self.test_project_2.id\n )", "def check_completion(self):\n\n time.sleep(3)\n while self.status == 0:\n pass", "def test_user_enroll_to_lesson_automatically_enroll_to_parent_project(self):\n #get lessons that their parent project is not enrolled:\n project_lesson_not_enrolled_qs = Lesson.objects.exclude(\n Q(id__in=self.lesson_enrolled_qs) |\n Q(project__id__in=Project.objects.filter(registrations__user=self.global_user_1))\n )\n\n ### lesson of published project:\n\n #get a lesson that its parent project is not enrolled and published:\n project_lesson_not_enrolled_published_obj = project_lesson_not_enrolled_qs.filter(\n project__publish_mode=Project.PUBLISH_MODE_PUBLISHED\n ).first()\n\n self.client.force_authenticate(self.global_user_1)\n\n #make sure nothing is found - not enrolled.\n api_project_lesson_not_enrolled_state = reverse('api:project-lesson-state-detail', kwargs={\n 'project_pk': project_lesson_not_enrolled_published_obj.project_id,\n 'lesson_pk': project_lesson_not_enrolled_published_obj.id\n })\n resp = self.client.get(api_project_lesson_not_enrolled_state)\n self.assertEqual(resp.status_code, 404)\n api_project_not_enrolled_state = reverse('api:project-state-detail', kwargs={\n 'project_pk': project_lesson_not_enrolled_published_obj.project_id\n })\n resp = self.client.get(api_project_not_enrolled_state)\n self.assertEqual(resp.status_code, 404)\n\n #POST and make sure the user is enrolled\n resp = self.client.post(api_project_lesson_not_enrolled_state)\n self.assertIn(resp.status_code, xrange(200, 202))\n resp = self.client.get(api_project_lesson_not_enrolled_state)\n self.assertEqual(resp.status_code, 200)\n\n #make sure the parent project state was automatically created\n resp = self.client.get(api_project_not_enrolled_state)\n self.assertEqual(resp.status_code, 200)\n\n #lesson of unpublished project:\n\n #get a lesson that its parent project is not enrolled and unpublished:\n project_lesson_not_enrolled_unpublished_obj = project_lesson_not_enrolled_qs.filter(\n project__publish_mode=Project.PUBLISH_MODE_EDIT,\n project__owner=self.global_user_1,\n ).first()\n\n #make sure nothing is found - not enrolled.\n api_project_lesson_not_enrolled_state = reverse('api:project-lesson-state-detail', kwargs={\n 'project_pk': project_lesson_not_enrolled_unpublished_obj.project_id,\n 'lesson_pk': project_lesson_not_enrolled_unpublished_obj.id\n })\n resp = self.client.get(api_project_lesson_not_enrolled_state)\n self.assertEqual(resp.status_code, 404)\n api_project_not_enrolled_state = reverse('api:project-state-detail', kwargs={\n 'project_pk': project_lesson_not_enrolled_unpublished_obj.project_id\n })\n resp = 
self.client.get(api_project_not_enrolled_state)\n self.assertEqual(resp.status_code, 404)\n\n #NOTE: Allowed!\n # #POST and make sure the user is enrolled\n # resp = self.client.post(api_project_lesson_not_enrolled_state)\n # self.assertEqual(resp.status_code, 403)\n # resp = self.client.get(api_project_lesson_not_enrolled_state)\n # self.assertEqual(resp.status_code, 404)\n #\n # #make sure the parent project state was automatically created\n # resp = self.client.get(api_project_not_enrolled_state)\n # self.assertEqual(resp.status_code, 404)", "def wait_for(func, expected_exceptions=(), retries=60):\n\n retries = int(retries)\n for retry in range(1, retries + 1):\n try:\n return_value = func()\n if return_value:\n break\n\n except expected_exceptions:\n if retry == retries:\n raise\n else:\n pass\n\n time.sleep(1)\n\n return return_value", "def test_different_project_name_with_similar_state(self):\n self.testcases[0].security_flag = False\n self.testcases[0].crash_type = 'Heap-buffer-overflow'\n self.testcases[0].crash_state = 'abcdef'\n self.testcases[0].project_name = 'project1'\n self.testcases[1].security_flag = False\n self.testcases[1].crash_type = 'Heap-buffer-overflow'\n self.testcases[1].crash_state = 'abcde'\n self.testcases[1].project_name = 'project2'\n\n for t in self.testcases:\n t.put()\n\n grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def test_swact_attempt_timeout(self):\n\n # mock the get_host queries\n # all remaining queries, the host returns 'Controller-Standby'\n self.sysinv_client.get_host.side_effect = itertools.chain(\n itertools.repeat(self.CONTROLLER_STANDBY))\n\n # mock the API call as successful on the subcloud\n self.sysinv_client.swact_host.return_value = self.CONTROLLER_SWACTING\n\n # invoke the strategy state operation on the orch thread\n self.worker.perform_state_action(self.strategy_step)\n\n # verify the swact command was actually attempted\n self.sysinv_client.swact_host.assert_called()\n\n # verify the query was invoked: 1 + max_attempts times\n self.assertEqual(swact_host.DEFAULT_MAX_QUERIES + 2,\n self.sysinv_client.get_host.call_count)\n\n # verify that state failed due to subcloud never finishing the swact\n self.assert_step_updated(self.strategy_step.subcloud_id,\n consts.STRATEGY_STATE_FAILED)", "def test_call_first_time(self, query_repo_url, get_credentials, valid_revision, get):\n self.assertEquals(\n self.query_api._get_all_jobs(\"try\", \"146071751b1e\"),\n json.loads(JOBS_SCHEDULE))\n\n assert get.call_count == 1\n\n # Test that this fills our caches\n self.assertEquals(\n query_jobs.JOBS_CACHE[(\"try\", \"146071751b1e\")],\n json.loads(JOBS_SCHEDULE))", "def test_get_projects(self):\n for project in ['TEST', 'NEWTEST', 'MYPROJECT']:\n self.db.insert_single_result(generate_mock_result(project=project))\n projects = self.db.get_projects()\n self.assertItemsEqual(['MYPROJECT', 'NEWTEST', 'TEST'], projects)", "def _worker(self):\n player = self.first_player # first player\n q = [None, self.q1, self.q2]\n states = [None, 'reset', 'reset']\n not_ret = None\n while True:\n op, a, ret = q[player].get()\n # print(f'worker: player: {player} op: {op}')\n try:\n assert op == states[\n player], f'state {op} expects {states[player]}'\n\n if op == 'reset':\n if player == self.first_player:\n # first time case\n s = self.env.reset()\n # set trace\n 
self.trace.set_init(self.env)\n legal_moves = self.env.legal_moves()\n ret.set((s, None, None, None, legal_moves))\n states[player] = 'step'\n else:\n states[player] = 'step'\n\n # switch player\n if player == 1:\n player = 2\n else:\n player = 1\n not_ret = ret\n elif op == 'step':\n # handle error\n s, r, done, info = self.env.step(a)\n # keep trace\n self.trace.step(a)\n\n if done:\n ret.set((s, r, done, info, []))\n not_ret.set((s, -r, done, info, []))\n # end\n self.done = True\n break\n else:\n legal_moves = self.env.legal_moves()\n not_ret.set((s, -r, done, info, legal_moves))\n\n # switch player\n if player == 1:\n player = 2\n else:\n player = 1\n not_ret = ret\n else:\n raise NotImplementedError()\n except Exception as e:\n # exception\n # return as exception\n ret.set(e)\n if not_ret is not None:\n not_ret.set(\n Exception('your opponent did something unexpected'))\n self._exception(\n Exception('something else terminated the environment'))\n break", "def test_profile_requests_are_retried(self) -> None:\n has_failed_once = False\n\n async def get_remote_profile(\n user_id: str, ignore_backoff: bool = True\n ) -> JsonDict:\n nonlocal has_failed_once\n if user_id == \"@bruce:remote\":\n if not has_failed_once:\n has_failed_once = True\n raise SynapseError(502, \"temporary network problem\")\n\n return {\n \"displayname\": \"Sir Bruce Bruceson\",\n \"avatar_url\": \"mxc://remote/789\",\n }\n else:\n raise ValueError(f\"unable to fetch {user_id}\")\n\n with patch.object(self.profile_handler, \"get_profile\", get_remote_profile):\n # Continue from the earlier test...\n self.test_private_rooms_do_not_have_profiles_collected()\n\n # Advance by a minute\n self.reactor.advance(61.0)\n\n # The request has already failed once\n self.assertTrue(has_failed_once)\n\n # The profile has yet to be updated.\n profiles = self.get_success(\n self.user_dir_helper.get_profiles_in_user_directory()\n )\n self.assertNotIn(\n \"@bruce:remote\",\n profiles,\n )\n\n # Advance by five minutes, after the backoff has finished\n self.reactor.advance(301.0)\n\n # The profile should have been updated now\n profiles = self.get_success(\n self.user_dir_helper.get_profiles_in_user_directory()\n )\n self.assertEqual(\n profiles.get(\"@bruce:remote\"),\n ProfileInfo(\n display_name=\"Sir Bruce Bruceson\", avatar_url=\"mxc://remote/789\"\n ),\n )", "def _poll_until_no_exception(self, fn, expected_exception, max_retries=20, retry_delay=3):\n\n for i in range(max_retries):\n try:\n return fn()\n except expected_exception:\n if i == max_retries - 1:\n raise\n if self.is_live:\n time.sleep(retry_delay)", "def until_project_in_state(self, project, states, timeout=None):\n timeout = timeout or self.timeout(fail=True)\n\n while not timeout and not self.project_in_state(project, states):\n time.sleep(2)\n\n return self.project_in_state(project, states)", "def testTrialErrored2(self):\n stats = self.default_statistics()\n trial_count = stats[str(0)][\"n\"] + stats[str(1)][\"n\"]\n sched, mock_runner = self.schedulerSetup(trial_count)\n trials = sched._state[\"bracket\"].current_trials()\n for t in trials[:-1]:\n mock_runner._launch_trial(t)\n sched.on_trial_result(\n mock_runner, t, result(stats[str(1)][\"r\"], 10))\n\n mock_runner._launch_trial(trials[-1])\n sched.on_trial_error(mock_runner, trials[-1])\n self.assertEqual(len(sched._state[\"bracket\"].current_trials()),\n self.downscale(stats[str(1)][\"n\"], sched))", "def _is_eligible_to_retry(task_instance):\n return task_instance.try_number <= task_instance.max_tries", 
"def retry(retries, task_f, check_f=bool, wait_f=None):\n for attempt in range(retries):\n ret = task_f()\n if check_f(ret):\n return ret\n if attempt < retries - 1 and wait_f is not None:\n wait_f(attempt)\n raise RetryException(\"Giving up after {} failed attempt(s)\".format(retries))", "def _run_notice_event(look_for_work):\n while True:\n try:\n found = look_for_work()\n if not found:\n break\n except ConcurrentUpdate as e:\n # retry if we had a race-condition while claiming work\n sys.stderr.write('Handling ErmrestConcurrentUpdate exception...\\n')\n pass", "def test_loopFailure_noRecovery(self):\n dbpool, _ignore_qpool, clock, _ignore_performerChosen = self._setupPools()\n fakeNow = datetime.datetime(2012, 12, 12, 12, 12, 12)\n\n oldNextJob = JobItem.nextjob\n @inlineCallbacks\n def _nextJob(cls, txn, now, minPriority, rowLimit):\n job = yield oldNextJob(txn, now, minPriority, rowLimit)\n work = yield job.workItem()\n if work.a == -2:\n raise ValueError(\"oops\")\n\n self.patch(JobItem, \"nextjob\", classmethod(_nextJob))\n\n # Let's create a couple of work items directly, not via the enqueue\n # method, so that they exist but nobody will try to immediately execute\n # them.\n\n @transactionally(dbpool.pool.connection)\n @inlineCallbacks\n def setup(txn):\n # Failing\n yield DummyWorkItem.makeJob(\n txn, a=-2, b=1, notBefore=fakeNow - datetime.timedelta(20 * 60)\n )\n # OK\n yield DummyWorkItem.makeJob(\n txn, a=1, b=0, notBefore=fakeNow - datetime.timedelta(20 * 60, 5)\n )\n yield setup\n clock.advance(20 - 12)\n\n @transactionally(dbpool.pool.connection)\n def check(txn):\n return JobItem.all(txn)\n\n jobs = yield check\n self.assertEqual(len(jobs), 2)\n self.assertEqual(jobs[0].assigned, None)\n self.assertEqual(jobs[0].isAssigned, 0)\n self.assertEqual(jobs[0].failed, 0)\n self.assertEqual(jobs[0].notBefore, fakeNow - datetime.timedelta(20 * 60))\n self.assertEqual(jobs[1].assigned, None)\n self.assertEqual(jobs[1].isAssigned, 0)\n self.assertEqual(jobs[1].failed, 0)\n self.assertEqual(jobs[1].notBefore, fakeNow - datetime.timedelta(20 * 60, 5))", "def retry_branch_off(branch_id):\n base_url = BRANCHES_SETTINGS[branch_id]['base_url']\n pump_enabled = BRANCHES_SETTINGS[branch_id]['pump_enabled']\n\n try:\n for attempt in range(2):\n try:\n if base_url is None:\n response_off = garden_controller.branch_off(branch_id=branch_id, pump_enable=pump_enabled)\n logging.info('response {0}'.format(response_off))\n\n if (response_off[branch_id]['state'] != 0):\n logging.error('Branch {0} cant be turned off. response {1}'.format(branch_id, str(response_off)))\n time.sleep(2)\n continue\n else:\n logging.info('Branch {0} is turned off by rule'.format(branch_id))\n return response_off\n else:\n response_off = requests.get(url=base_url, params={'branch_id': branch_id})\n logging.info('response {0}'.format(str(response_off)))\n\n if (response_off[branch_id]['state'] != 0):\n logging.error('Branch {0} cant be turned off. response {1}'.format(branch_id, response_off))\n time.sleep(2)\n continue\n else:\n logging.info('Branch {0} is turned off by rule'.format(branch_id))\n return response_off\n except Exception as e:\n logging.error(e)\n logging.error(\"Can't turn off {0} branch. Exception occured. {1} try out of 2\".format(branch_id, attempt))\n time.sleep(2)\n continue\n\n raise Exception(\"Can't turn off {0} branch. Retries limit reached\".format(branch_id))\n except Exception as e:\n logging.error(e)\n logging.error(\"Can't turn off branch id={0}. 
Exception occured\".format(branch_id))\n raise Exception(\"Can't turn off {0} branch\".format(branch_id))", "def test_loopFailure_failedRecovery(self):\n dbpool, _ignore_qpool, clock, _ignore_performerChosen = self._setupPools()\n fakeNow = datetime.datetime(2012, 12, 12, 12, 12, 12)\n\n oldAssign = JobItem.assign\n @inlineCallbacks\n def _assign(self, when, overdue):\n work = yield self.workItem()\n if work.a == -2:\n raise ValueError(\"oops\")\n yield oldAssign(self, when, overdue)\n\n self.patch(JobItem, \"assign\", _assign)\n\n @inlineCallbacks\n def _failedToRun(self, locked=False, delay=None):\n raise ValueError(\"oops\")\n\n self.patch(JobItem, \"failedToRun\", _failedToRun)\n\n # Let's create a couple of work items directly, not via the enqueue\n # method, so that they exist but nobody will try to immediately execute\n # them.\n\n @transactionally(dbpool.pool.connection)\n @inlineCallbacks\n def setup(txn):\n # Failing\n yield DummyWorkItem.makeJob(\n txn, a=-2, b=1, notBefore=fakeNow - datetime.timedelta(20 * 60)\n )\n # OK\n yield DummyWorkItem.makeJob(\n txn, a=1, b=0, notBefore=fakeNow - datetime.timedelta(20 * 60, 5)\n )\n yield setup\n clock.advance(20 - 12)\n\n @transactionally(dbpool.pool.connection)\n def check(txn):\n return JobItem.all(txn)\n\n jobs = yield check\n self.assertEqual(len(jobs), 2)\n self.assertEqual(jobs[0].assigned, None)\n self.assertEqual(jobs[0].isAssigned, 0)\n self.assertEqual(jobs[0].failed, 0)\n self.assertEqual(jobs[0].notBefore, fakeNow - datetime.timedelta(20 * 60))\n self.assertEqual(jobs[1].assigned, None)\n self.assertEqual(jobs[1].isAssigned, 0)\n self.assertEqual(jobs[1].failed, 0)\n self.assertEqual(jobs[1].notBefore, fakeNow - datetime.timedelta(20 * 60, 5))", "def test_task_retry_and_succeed_56_tasks(self):\n all_done = []\n number = 56\n for x in range(number):\n mock_task = MockFailOnceTask()\n all_done.append(mock_task.done)\n self.measurementManager.schedule(mock_task)\n\n d = defer.DeferredList(all_done)\n\n @d.addCallback\n def done(res):\n self.assertEqual(self.measurementManager.failures, number)\n #self.assertEqual(len(self.measurementManager.failures), number)\n for task_result, task_instance in self.measurementManager.successes:\n self.assertEqual(task_result, 42)\n self.assertIsInstance(task_instance, MockFailOnceTask)\n\n return d", "def libvirt_retry(self, op):\n end_time = time.time() + 30.0\n ignore = [\n # libvirt connection closed for some reason, just retry\n \"Unable to read from monitor: Connection reset by peer\",\n # lxc container starting often fails as they're started\n # simultaneously with the same device names, use a unique\n # name to work around it.\n # http://www.redhat.com/archives/libvir-list/2013-August/msg01475.html\n \"RTNETLINK answers: File exists\",\n ]\n while True:\n try:\n return op()\n except libvirt.libvirtError as error:\n if not any(ignorable in str(error) for ignorable in ignore):\n # some other error, raise immediately\n raise\n\n time_left = max(end_time - time.time(), 0)\n if not time_left:\n # timeout\n raise\n\n self.log.warning(\"got possibly transient error '%s' from libvirt, retrying for %.1fs...\",\n error, time_left)\n time.sleep(1.0)", "def next_problem(self, _data):\r\n self.update_task_states()\r\n return {'success': True, 'html': self.get_html_nonsystem(), 'allow_reset': self.ready_to_reset}", "def next_parameters(self):\n\n if self.number_of_runs == -1 or self.runs_performed < self.number_of_runs:\n self.runs_performed += 1\n _log.debug('%d runs performed (calls to 
`next_parameters()`)' % self.runs_performed)\n else:\n _log.info('No more parameters to test in the database.')\n return None\n \n records = self.db.get_table()\n _log.debug('Retrieved %d parameters' % len(records))\n\n # Do we have a last-test in the config file\n if self.config and \"last-test\" in self.config and self.config[\"last-test\"]:\n _log.info('Using `last-test` with id=\"%s\" from config.txt' %\n str(self.config[\"last-test\"]))\n for i in range(0, len(records)):\n if (\n str(self.config.config[\"last-test\"]) == str(records[i][\"id\"]) and\n records[i][\"status\"] != \"successful\"\n ):\n records[i][\"status\"] = \"in progress\"\n if \"start-time\" in records[i]:\n records[i][\"start-time\"] = \\\n datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n if \"performed-by\" in records[i]:\n records[i][\"performed-by\"] = self.performed_by\n\n self.db.update_row(i, records[i])\n\n return records[i]\n\n for i in range(0, len(records)):\n if not len(records[i][\"status\"]):\n if (\n 'computer-strength' in records[i] and \n self.computer_strength < int(records[i][\"computer-strength\"])\n ):\n continue\n \n records[i][\"status\"] = \"in progress\"\n if \"start-time\" in records[i]:\n records[i][\"start-time\"] = \\\n datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n if \"performed-by\" in records[i]:\n records[i][\"performed-by\"] = self.performed_by\n self.db.update_row(i, records[i])\n\n # Save id to local cache\n if self.config:\n self.config.update(key='last-test', value=str(records[i][\"id\"]))\n\n return records[i]\n\n return None", "def test_returns_moderate_projects_if_difficulty_set_to_moderate(self):\n # Arrange\n self.test_project_2.private = False\n # Change difficulty of test_project_2 to easy so that it is not returned.\n self.test_project_2.difficulty = ProjectDifficulty.EASY.value\n # Act\n response = self.client.get(\n self.url,\n headers={\"Authorization\": self.user_session_token},\n query_string={\"difficulty\": \"MODERATE\"},\n )\n # User is only permitted to map test_project_1 and test_project_2, since test_project_3 is DRAFT.\n # So we should get only test_project_1 as it is the only project with difficulty set to MODERATE.\n # Assert\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(response.json[\"results\"]), 1)\n self.assertEqual(\n response.json[\"results\"][0][\"projectId\"], self.test_project_1.id\n )", "def get_url():\r\n songs = []\r\n with open(FILE_CONTAINING_URLS) as f:\r\n for line in f:\r\n if not line.startswith(\"#\") and is_web_url(line):\r\n songs.append(line)\r\n\r\n # pick a random song and store it in song variable\r\n song = random.choice(songs)\r\n\r\n url_attempts = []\r\n\r\n for x in range(RETRY_COUNT):\r\n response = requests.get(song)\r\n # check if URL is valid and also make sure video is available\r\n if response.ok and video_is_available(song):\r\n return song\r\n # store failed URL\r\n url_attempts.append(song)\r\n # choose new random song\r\n song = random.choice(songs)\r\n\r\n print(\"Could not access video URLs. 
Please check network connection\")\r\n print(\"Tried the following URLs before failing:\")\r\n print(\"\\n\".join(url_attempts))\r\n exit(1)", "def test_wait_for_predicate_second_false(self):\n predicate_mock = mock.MagicMock(side_effect=[True, False])\n # 10 retry limit to avoid a near infinite loop on an error.\n train_utils.wait_for_predicate(predicate_mock, num_retries=10)\n self.assertEqual(predicate_mock.call_count, 2)", "def get_next_if_any(self):\n try:\n ret = self.work[deepcopy(self.i)]\n self.i += 1\n # print \"Trickling item\", self.i\n return ret\n except Exception:\n return None", "def test_solve_one_player_3(self):\n self.rush_hour_data = rush_hour_data_3\n self.state_data = state_data_3\n self.execute_minimax_single_player()", "def retry_on_exception(func, max_attempts=5, ignored_exceptions=(StaleElementReferenceException, InvalidElementStateException)):\r\n attempt = 0\r\n while attempt < max_attempts:\r\n try:\r\n return func()\r\n except ignored_exceptions:\r\n world.wait(1)\r\n attempt += 1\r\n\r\n assert_true(attempt < max_attempts, 'Ran out of attempts to execute {}'.format(func))", "def testTurbiniaWait(self, mock_get_request_status, _):\n mock_api_instance = mock.MagicMock()\n mock_api_instance.create_request = mock_get_request_status\n self.turbinia_processor.requests_api_instance = mock_api_instance\n mock_get_request_status.return_value = self._request_status\n for task, path in self.turbinia_processor.TurbiniaWait(TASK_ID):\n # Check that the task and path are correct for a PlasoParserTask\n if task[\"id\"] == TASK_ID:\n self.assertEqual(task, self._request_status[\"tasks\"][0])\n self.assertEqual(path, TEST_TASK_PATH)\n break", "def get_page_retry(self, *a, **b):\n\t\tretry_count = int(b.pop('retry', 3))\n\t\ttimeout = int(b.pop('timeout', 1))\n\t\t\n\t\tfor i in xrange(retry_count):\n\t\t\tif i != 0:\n\t\t\t\tprint 'Will retry (%d) in %d seconds' % (i, timeout)\n\t\t\t\tself.reset()\n\t\t\t\ttime.sleep(timeout)\n\t\t\ttry:\n\t\t\t\ttext = self.get_page(*a, **b)\n\t\t\texcept socket.error, e:\n\t\t\t\tprint 'Socket failure', str(e)\n\t\t\t\tcontinue\n\t\t\tif '<H2>The requested URL could not be retrieved</H2>' in text:\n\t\t\t\treason = \"unknown reason\"\n\t\t\t\tfor excuse in (\n\t\t\t\t\t'(104) Connection reset by peer',\n\t\t\t\t\t'(111) Connection refused',\n\t\t\t\t\t):\n\t\t\t\t\tif excuse in text:\n\t\t\t\t\t\treason = excuse\n\t\t\t\t\t\tbreak\n\t\t\t\tprint \"SQUID failed because %r\" % (reason,)\n\t\t\t\tcontinue\n\t\t\treturn text\n\t\t# for loop failed:\n\t\traise TooManyRetries(\"Too many tries to download page - socket error\")" ]
[ "0.6727273", "0.66801494", "0.61674005", "0.6153633", "0.59194636", "0.5918431", "0.5909713", "0.58628047", "0.5764597", "0.5757618", "0.5737513", "0.5729122", "0.5724581", "0.56619436", "0.5641783", "0.55695194", "0.55670893", "0.555651", "0.5526606", "0.5525379", "0.5483605", "0.5440648", "0.5415864", "0.54151237", "0.53782326", "0.5355858", "0.53551126", "0.5345179", "0.5322792", "0.5320428", "0.52996236", "0.5295803", "0.5267943", "0.5249999", "0.5230637", "0.52186894", "0.52170455", "0.5183402", "0.5180328", "0.517982", "0.51736426", "0.5163567", "0.51609427", "0.5143398", "0.5141568", "0.5126612", "0.5123587", "0.5123587", "0.5108861", "0.5108313", "0.50954866", "0.50916344", "0.50878125", "0.5083535", "0.5079033", "0.5078204", "0.5076111", "0.50595397", "0.5052012", "0.50443566", "0.50417167", "0.5041569", "0.5037113", "0.50330114", "0.50328743", "0.5023085", "0.50208855", "0.5017152", "0.50064903", "0.50009525", "0.49981186", "0.4996425", "0.49877182", "0.4975936", "0.497354", "0.49713072", "0.49693403", "0.49684328", "0.4966361", "0.49574715", "0.49520388", "0.49467435", "0.4935377", "0.49202922", "0.4919079", "0.4916227", "0.4913958", "0.49122515", "0.49068627", "0.49028346", "0.49019474", "0.48964727", "0.48920768", "0.48900682", "0.48881832", "0.4884666", "0.48830178", "0.4877462", "0.48772112", "0.48728922" ]
0.5577417
15
Should fail to get internal project and return None after eventually timing out.
def test_get_internal_project_fail(
        self,
        sleep: MagicMock,
        time_time: MagicMock,
        internal_project: PropertyMock
    ):
        project = exposed.ExposedProject()
        time_time.side_effect = range(20)
        internal_project.return_value = None
        result = project.get_internal_project()
        self.assertIsNone(result)
        self.assertEqual(10, sleep.call_count)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_internal_project(\n self,\n sleep: MagicMock,\n internal_project: PropertyMock\n ):\n project = exposed.ExposedProject()\n internal_project.side_effect = [None, None, None, 'test']\n result = project.get_internal_project()\n self.assertEqual('test', result)\n self.assertEqual(2, sleep.call_count)", "def test_get_project(self):\n pass", "def get_project(con):\n try:\n return con.project_read(fq_name=conf.get('default_project', 'UNEXPECTED_VALUE'))\n except:\n log.debug('Unable to find project default-domain, admin:', exc_info=True)\n return None", "def Project(self):\n\n if not self.connected:\n return None\n\n try:\n return _ReadNoProxy(GOOGLE_GCE_METADATA_PROJECT_URI)\n except urllib2.HTTPError as e:\n raise MetadataServerException(e)\n except urllib2.URLError as e:\n raise CannotConnectToMetadataServerException(e)", "def test_get_project(self):\n self.assertEqual(self.remote_project.get_project(), self.project)", "def get_current_project():\n return get_from_session(KEY_PROJECT)", "def test_project_stop_aborted(self, get_internal_project: MagicMock):\n get_internal_project.return_value = None\n ep = exposed.ExposedProject()\n ep.stop()", "def get_project(self, name=None):\n if not name:\n if not self.select_project:\n log.error(\"no default project name specified\")\n return\n name = self.select_project\n\n if name in self.projects:\n return self.projects[name]\n\n log.debug( \"project {} not found in {} projects \".format(name, len(self.projects)) )\n return None", "def get_project(self):\n raise NotImplementedError(\"get_project is not implemented\")", "def getMain(self):\n\n if self.__projects:\n return self.__projects[0]\n else:\n return None", "def test_no_such_project(self):\n project = cd.project.get_internal_project()\n cd.project.load(None)\n\n with self.assertRaises(Exception):\n self.run_step('FAKE')\n\n cd.project.load(project)", "def test_get_projects(self):\n pass", "def get_project(self, id):\n for project in self.projects:\n if project.id == int(id):\n ret_val = project\n break\n else:\n ret_val = None\n\n return ret_val", "def get_project(db, id):\n \n for element in db:\n if element['project_no'] == id:\n return element\n return None", "def get_project(self, project_name):\n raise self._get_notimplementederror(\"get_project\")", "def test_read_project(self):\n pass", "def test_read_project(self):\n pass", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\n \"/api/v2/projects/queries/999/similar-projects/\",\n headers={\"Authorization\": self.user_session_token},\n )\n self.assertEqual(response.status_code, 404)", "def GetProject(args):\n return args.project or properties.VALUES.core.project.GetOrFail()", "def get_current_project(self):\n\n try:\n command = self._oc_command([\"project\", \"-q\"])\n output = run_cmd(command, return_output=True)\n except subprocess.CalledProcessError as ex:\n raise ConuException(\"Failed to obtain current project name : %s\" % ex)\n\n try:\n return output.rstrip() # remove '\\n'\n except IndexError:\n raise ConuException(\"Failed to obtain project name\")", "def test_get_status_no_project(\n get_internal_project: MagicMock,\n step_writer_serialize: MagicMock,\n):\n get_internal_project.return_value = None\n\n response = statuses.get_status(0, force=True)\n\n assert response['success'], \"\"\"\n Expect the status process to be successful.\n \"\"\"\n assert response['data']['project'] is None, \"\"\"\n Expect there to be no project data.\n \"\"\"\n assert 0 == 
step_writer_serialize.call_count, \"\"\"\n Expect no step serialization to be carried out.\n \"\"\"\n assert [] == response['data']['step_changes'], \"\"\"\n Expect no step changes to exist without project data.\n \"\"\"\n assert response['hash'].startswith('forced-'), \"\"\"\n Expect a forced call to have a forced hash.\n \"\"\"", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\"/api/v2/projects/999/queries/aoi/\")\n self.assertEqual(response.status_code, 404)", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\n \"/api/v2/projects/999/queries/notasks/\",\n headers={\"Authorization\": self.user_session_token},\n )\n self.assertEqual(response.status_code, 404)", "def test_projects_get(self):\n response = self.client.open('/project-tracker/projects',\n method='GET')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def get(self, name):\n try:\n return self.projects[name]\n except KeyError:\n print(\"No project called %s was found\" %name)", "def __get_project_version__(self):\n api = FortifyApi(self.ssc_server, token=self.token, verify_ssl=False)\n try:\n response = api.get_project_versions() # api should support a search expression here. alas...\n if response.success:\n for project_version in response.data['data']:\n if project_version['project']['name'] == self.application_name:\n if project_version['name'] == self.fortify_version:\n # we have a matching project version\n Logger.app.debug(\"Found existing project version {0}\".format(project_version['id']))\n return project_version['id']\n # Didn't find a matching project version, verify that our project exists\n for project_version in response.data['data']:\n if project_version['project']['name'] == self.application_name:\n # Our project exsits, so create a new version\n return self.__create_project_version__()\n # Let upload_scan know that our project doesn't exist\n return -2\n elif \"401\" in response.message:\n # Avoid printing error for invalid token. Return -1 to reauth\n return -1\n else:\n Logger.app.critical(\"Failed to get project version. {0}\".format(response.message))\n except Exception as e:\n Logger.app.critical(\"Exception trying to get project version. 
{0}\".format(e.message))\n\n return None", "def _determine_default_project(project=None):\n if project is None:\n project = _get_gcd_project()\n\n if project is None:\n project = _helpers._determine_default_project(project=project)\n\n return project", "def get_project(self):\n if self.api_version == 2:\n return self.creds.get('tenant_id') or self.creds.get('tenant_name')\n else:\n return self.creds.get('project_id') or self.creds.get('project_name')", "def test_get_projects_throws_if_project_does_not_exist(fc: fetcher.Fetcher):\n with pytest.raises(exceptions.NotFoundError) as exc:\n fc.get_projects(\"BadProject\")\n assert \"An error occured while getting projects.\" in str(exc.value)", "def get_project(arn=None):\n pass", "def _fetch_project_local(self, internal_api: Api) -> None:\n # these asserts are all guaranteed to pass, but are required by mypy\n assert self.source != LaunchSource.LOCAL and self.source != LaunchSource.JOB\n assert isinstance(self.uri, str)\n assert self.project_dir is not None\n _logger.info(\"Fetching project locally...\")\n if utils._is_wandb_uri(self.uri):\n source_entity, source_project, source_run_name = utils.parse_wandb_uri(\n self.uri\n )\n run_info = utils.fetch_wandb_project_run_info(\n source_entity, source_project, source_run_name, internal_api\n )\n program_name = run_info.get(\"codePath\") or run_info[\"program\"]\n\n if run_info.get(\"cudaVersion\"):\n original_cuda_version = \".\".join(run_info[\"cudaVersion\"].split(\".\")[:2])\n\n if self.cuda is None:\n # only set cuda on by default if cuda is None (unspecified), not False (user specifically requested cpu image)\n wandb.termlog(\n f\"{LOG_PREFIX}Original wandb run {source_run_name} was run with cuda version {original_cuda_version}. Enabling cuda builds by default; to build on a CPU-only image, run again with --cuda=False\"\n )\n self.cuda_version = original_cuda_version\n self.cuda = True\n if (\n self.cuda\n and self.cuda_version\n and self.cuda_version != original_cuda_version\n ):\n wandb.termlog(\n f\"{LOG_PREFIX}Specified cuda version {self.cuda_version} differs from original cuda version {original_cuda_version}. 
Running with specified version {self.cuda_version}\"\n )\n # Specify the python runtime for jupyter2docker\n self.python_version = run_info.get(\"python\", \"3\")\n\n downloaded_code_artifact = utils.check_and_download_code_artifacts(\n source_entity,\n source_project,\n source_run_name,\n internal_api,\n self.project_dir,\n )\n if downloaded_code_artifact:\n self._image_tag = binascii.hexlify(\n downloaded_code_artifact.digest.encode()\n ).decode()\n else:\n if not run_info[\"git\"]:\n raise LaunchError(\n \"Reproducing a run requires either an associated git repo or a code artifact logged with `run.log_code()`\"\n )\n branch_name = utils._fetch_git_repo(\n self.project_dir,\n run_info[\"git\"][\"remote\"],\n run_info[\"git\"][\"commit\"],\n )\n if self.git_version is None:\n self.git_version = branch_name\n patch = utils.fetch_project_diff(\n source_entity, source_project, source_run_name, internal_api\n )\n tag_string = run_info[\"git\"][\"remote\"] + run_info[\"git\"][\"commit\"]\n if patch:\n utils.apply_patch(patch, self.project_dir)\n tag_string += patch\n\n self._image_tag = binascii.hexlify(tag_string.encode()).decode()\n\n # For cases where the entry point wasn't checked into git\n if not os.path.exists(os.path.join(self.project_dir, program_name)):\n downloaded_entrypoint = utils.download_entry_point(\n source_entity,\n source_project,\n source_run_name,\n internal_api,\n program_name,\n self.project_dir,\n )\n if not downloaded_entrypoint:\n raise LaunchError(\n f\"Entrypoint file: {program_name} does not exist, \"\n \"and could not be downloaded. Please specify the entrypoint for this run.\"\n )\n\n if (\n \"_session_history.ipynb\" in os.listdir(self.project_dir)\n or \".ipynb\" in program_name\n ):\n program_name = utils.convert_jupyter_notebook_to_script(\n program_name, self.project_dir\n )\n\n # Download any frozen requirements\n utils.download_wandb_python_deps(\n source_entity,\n source_project,\n source_run_name,\n internal_api,\n self.project_dir,\n )\n\n if not self._entry_points:\n _, ext = os.path.splitext(program_name)\n if ext == \".py\":\n entry_point = [\"python\", program_name]\n elif ext == \".sh\":\n command = os.environ.get(\"SHELL\", \"bash\")\n entry_point = [command, program_name]\n else:\n raise LaunchError(f\"Unsupported entrypoint: {program_name}\")\n self.add_entry_point(entry_point)\n self.override_args = utils.merge_parameters(\n self.override_args, run_info[\"args\"]\n )\n else:\n assert utils._GIT_URI_REGEX.match(self.uri), (\n \"Non-wandb URI %s should be a Git URI\" % self.uri\n )\n if not self._entry_points:\n wandb.termlog(\n f\"{LOG_PREFIX}Entry point for repo not specified, defaulting to python main.py\"\n )\n self.add_entry_point([\"python\", \"main.py\"])\n branch_name = utils._fetch_git_repo(\n self.project_dir, self.uri, self.git_version\n )\n if self.git_version is None:\n self.git_version = branch_name", "def get_project(project_id):\n return Project.objects.get(id=project_id)", "def get_project(project):\n command = 'openstack project show %s' % project\n try:\n project_info = parse_output(Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0])\n except:\n print \"Project '%s' not found.\" % project\n sys.exit(-1)\n return project_info", "def getProject(self):\n\t\treturn self.__project", "def __get_project_id(self):\n request = urllib2.Request(self.host_api+\"projects?owner=\"+urllib2.quote(self.owner)+\"&display_name=\"+urllib2.quote(self.project_name))\n # request = 
urllib2.Request(self.host_api+\"projects?owner=\"+self.owner+\"&display_name=Galaxy%20Zoo%20Bar%20Lengths\")\n # print hostapi+\"projects?owner=\"+owner+\"&display_name=\"+project_name\n request.add_header(\"Accept\",\"application/vnd.api+json; version=1\")\n request.add_header(\"Authorization\",\"Bearer \"+self.token)\n\n # request\n try:\n response = urllib2.urlopen(request)\n except urllib2.HTTPError as e:\n print self.host_api+\"projects?owner=\"+self.owner+\"&display_name=\"+self.project_name\n print 'The server couldn\\'t fulfill the request.'\n print 'Error code: ', e.code\n print 'Error response body: ', e.read()\n except urllib2.URLError as e:\n print 'We failed to reach a server.'\n print 'Reason: ', e.reason\n else:\n # everything is fine\n body = response.read()\n\n # put it in json structure and extract id\n data = json.loads(body)\n return data[\"projects\"][0][\"id\"]", "def get_project(self, project_id):\n if not self.validate():\n raise SettingCustomVisionAccessFailed\n return self.get_trainer_obj().get_project(project_id=project_id)", "def getProject(self):\r\n return self.project", "def get_project(self, name=None):\n if not name:\n name = self.get_project_name()\n projects = self.get_projects()\n for p in projects:\n if p.name == name:\n return p\n raise NotFound(name)", "def GetProject(self):\n errors = []\n objects = list(request_helper.MakeRequests(\n requests=[(self.compute.projects,\n 'Get',\n self.messages.ComputeProjectsGetRequest(\n project=properties.VALUES.core.project.Get(\n required=True),\n ))],\n http=self.http,\n batch_url=self.batch_url,\n errors=errors,\n custom_get_requests=None))\n if errors:\n utils.RaiseToolException(\n errors,\n error_message='Could not fetch project resource:')\n return objects[0]", "def get_project(benchmark):\n return benchmark_config.get_config(benchmark)['project']", "def test_no_such_project(self):\n def mock_send_request(*args, **kwargs):\n return Response().update(\n remote_source_directory=directory\n ).response\n\n directory = os.path.dirname(os.path.realpath(__file__))\n response = support.run_remote_command(\n command='sync',\n mock_send_request=mock_send_request\n )\n self.assert_has_error_code(response, 'NO_PROJECT')", "def test_get_project(self):\n url = reverse(\n 'projectroles:api_project_retrieve',\n kwargs={'project': self.project.sodar_uuid},\n )\n response = self.request_knox(url)\n\n self.assertEqual(response.status_code, 200)\n response_data = json.loads(response.content)\n expected = {\n 'title': self.project.title,\n 'type': self.project.type,\n 'parent': str(self.category.sodar_uuid),\n 'description': self.project.description,\n 'readme': '',\n 'public_guest_access': False,\n 'archive': False,\n 'roles': {\n str(self.owner_as_cat.sodar_uuid): {\n 'user': self.get_serialized_user(self.user_owner_cat),\n 'role': PROJECT_ROLE_OWNER,\n 'inherited': True,\n 'sodar_uuid': str(self.owner_as_cat.sodar_uuid),\n },\n str(self.owner_as.sodar_uuid): {\n 'user': self.get_serialized_user(self.user_owner),\n 'role': PROJECT_ROLE_OWNER,\n 'inherited': False,\n 'sodar_uuid': str(self.owner_as.sodar_uuid),\n },\n },\n 'sodar_uuid': str(self.project.sodar_uuid),\n }\n self.assertEqual(response_data, expected)", "def get_project(self, name=None, project_id=None):\n if project_id:\n return self.http_call(\n \"get\", url=f\"{self.base_url}/projects/{project_id}\"\n ).json()\n elif name:\n try:\n return next(p for p in self.get_projects() if p[\"name\"] == name)\n except StopIteration:\n # Project not found\n return None\n 
else:\n raise ValueError(\"Must provide either a name or project_id\")", "def test_get_not_found(self):\n url = reverse(\n 'projectroles:api_project_retrieve',\n kwargs={'project': INVALID_UUID},\n )\n response = self.request_knox(url)\n self.assertEqual(response.status_code, 404)", "def test_missing_project(self):\n task = Task({\n 'name': 'test',\n 'id': 1,\n 'stage_id' : [1, 'name'],\n 'date_deadline': False,\n 'date_start': False,\n 'date_end': False,\n 'partial_messages': [{'date':'2018-10-21 12:00:00'}],\n 'kanban_state': 'blocked',\n 'planned_hours': 100,\n 'priority': '1'\n })\n self.assertIsNotNone(task)\n self.assertEqual(task.project, 'Not assigned to project')", "def test_returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\"/api/v2/projects/999/queries/summary/\")\n self.assertEqual(response.status_code, 404)", "def _get_project(self):\n project_id = self._node.parm('project').eval()\n projects = data_block.for_houdini().projects()\n project_names = [project[\"name\"]\n for project in projects if project['id'] == project_id]\n if not project_names:\n raise hou.InvalidInput(\n \"%s %s is an invalid project.\" %\n self._node.name(), project_id)\n return {\n \"id\": project_id,\n \"name\": project_names[0]\n }", "def project():", "def project():", "def project():", "def get_project(self, project_id):\n res = self.conn.cursor().execute(\"SELECT * FROM projects where id=?\", (project_id,))\n return res.fetchone()", "def getProjectByName(self, name):\n\n for project in self.__projects:\n if project.getName() == name:\n return project\n\n return None", "def get_current_spdx_project(self) -> Optional[SpdxProject]:\n pass", "def get_project(abort_not_found=True, **project_filters):\n return get_resource(\n Project.query.filter_by(**project_filters), abort_not_found)", "def _get_projects(project_ids):\n if _ALL in project_ids:\n return projects_lib.get_all()\n return projects_lib.get_selective(project_ids)", "def getProject(self):\n return self.project", "def returns_404_if_project_doesnt_exist(self):\n # Act\n response = self.client.get(\"/api/v2/projects/999/queries/priority-areas/\")\n self.assertEqual(response.status_code, 404)", "def project(project_no_init: Project) -> Project:\n from pdm.cli.utils import merge_dictionary\n\n data = {\n \"project\": {\n \"name\": \"test-project\",\n \"version\": \"0.0.0\",\n \"description\": \"\",\n \"authors\": [],\n \"license\": {\"text\": \"MIT\"},\n \"dependencies\": [],\n \"requires-python\": \">=3.7\",\n },\n \"build-system\": DEFAULT_BACKEND.build_system(),\n }\n\n merge_dictionary(project_no_init.pyproject._data, data)\n project_no_init.pyproject.write()\n # Clean the cached property\n project_no_init._environment = None\n return project_no_init", "def _get_project_id():\n\n extras = BaseHook.get_connection('google_cloud_default').extra_dejson\n key = 'extra__google_cloud_platform__project'\n if key in extras:\n project_id = extras[key]\n else:\n raise ('Must configure project_id in google_cloud_default '\n 'connection from Airflow Console')\n return project_id", "def get_project_id():\n path = '/computeMetadata/v1/project/project-id'\n try:\n http_response = _issue_http_request(\n HTTP_GET, path, REQUIRED_METADATA_HEADER)\n return http_response.read()\n except errors.MetadataServerHttpError:\n LOGGER.exception('Unable to read project id from metadata server.')\n return None", "def get_project(self, project_id):\n endpoint = '/projects/{}'.format(project_id)\n return self._api_call('get', endpoint)", "def 
_loadProjects(self):\n logger.debug(\"Func: _loadProjects\")\n\n if not os.path.isfile(self._pathsDict[\"projectsFile\"]):\n return\n else:\n projectsData = self._loadJson(self._pathsDict[\"projectsFile\"])\n if projectsData == -2:\n return -2\n return projectsData", "def test_get_current(self, rf, projects):\n # get queryset\n request = rf.get(\"/projects/my\")\n view = MyProjectListView()\n view.setup(request)\n view.dispatch(request)\n view.get_queryset()\n qs = view.get_current()\n\n # slavic working group grant ended so it is \"past\"\n assert projects[\"derrida\"] in qs\n assert projects[\"pliny\"] in qs\n assert projects[\"ocampo\"] in qs\n assert projects[\"slavic\"] not in qs", "def get_projects(self):\n response = self.request(verb=requests.get, address=\"projects\")\n # FIXME: if no results, must we raise an exception?\n return response[\"results\"] if \"results\" in response else response", "def test_get_project_id_from_name_missing_proj(self, mock_get):\n mock_get.side_effect = CharonError('Error', status_code=404)\n with self.assertRaises(ValueError):\n get_project_id_from_name(self.project_name)", "def get_project(self, i):\r\n return self.__projects[i]", "def get_project(self):\n project_id = self.kwargs['project_id']\n try:\n project = Project.objects.get(pk=project_id)\n except ObjectDoesNotExist:\n raise ObjectNotFound('Not found')\n contributors = CustomUser.objects.filter(contributor__project=project.pk)\n if self.request.user not in contributors:\n raise ObjectNotFound('Not found')\n return project", "def get_project(self):\n project_id = self.kwargs['project_id']\n try:\n project = Project.objects.get(pk=project_id)\n except ObjectDoesNotExist:\n raise ObjectNotFound('Not found')\n contributors = CustomUser.objects.filter(contributor__project=project.pk)\n if self.request.user not in contributors:\n raise ObjectNotFound('Not found')\n return project", "def get_project(self):\n project_id = self.kwargs['project_id']\n try:\n project = Project.objects.get(pk=project_id)\n except ObjectDoesNotExist:\n raise ObjectNotFound('Not found')\n self.contributors = CustomUser.objects.filter(contributor__project=project.pk)\n if self.request.user not in self.contributors:\n raise ObjectNotFound('Not found')\n return project", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, 
\"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def project(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"project\")", "def get_keystone_v3_project_id(self, project_name):\n LOG_OBJ.debug(\"Get the project ID.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/projects?name=\" + \\\n str(project_name)\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while getting the \"\n \"ID of project\")\n print (\"No response from Server while getting the \"\n \"ID of project\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get project ID Failed with status %s and error \"\n \": %s\" % (response.status, response.data))\n print (\"Get project ID Failed with status %s and error : %s\" %\n (response.status, response.data))\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"project details : %s \" % output)\n print (\"project details : %s \" % output)\n if len(output['projects']) != 1:\n LOG_OBJ.debug(\"No. of projects with name %s is %s\"\n % (project_name, len(output['projects'])))\n print(\"No. of projects with name %s is %s\"\n % (project_name, len(output['projects'])))\n return\n return output['projects'][0]['id']", "def get_project():\n\n title = request.args.get('title')\n if not title:\n return \"Please enter a title!\"\n\n project = hackbright.get_project_by_title(title)\n\n grades = hackbright.get_grades_by_title(title)\n\n if not project:\n return \"There is no project with title \\\"{}\\\".\".format(title)\n\n title, description, max_grade = project\n return render_template(\"project_info.html\",\n title=title,\n description=description,\n max_grade=max_grade,\n grades=grades)", "def test_projects_id_get(self):\n response = self.client.open('/project-tracker/projects/{id}'.format(id=56),\n method='GET')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def find_project_for_story(story_id):\n\n for project in Project.all():\n story = project.load_story(story_id)\n if story is not None:\n return project\n\n #Not found\n print \"No project found for story: #{}\".format(story_id)\n return None", "def project(self):\n return read_small_file(self.homeDirectory + \"/.project\")", "def facade_retrieve_side_effect(*args, **kwargs):\r\n if args[0] == Project:\r\n return Project(\"GTID\", [])\r\n else:\r\n raise LookupError(\"team lookup error\")", "def test_not_authed_nonpublic_project(self):\n # Clear out existing project with ID=1 if necessary.\n Project.objects.filter(id=2).delete()\n locale = LocaleFactory.create(code='fakelocale')\n project = ProjectFactory.create(id=2, slug='valid-project', locales=[locale])\n ResourceFactory.create(project=project)\n\n response = self.client.get('/fakelocale/valid-project/')\n assert_redirects(response, reverse('pontoon.home'))\n assert_equal(self.client.session['translate_error'], {'redirect': 
'/fakelocale/valid-project/'})", "def get_project(self, project):\n project_name = project\n\n try:\n # FIXME: project should be an integer or str, no both\n project_id = int(project)\n except ValueError:\n project_id = None\n\n try:\n # Find the first project occurrence\n project_found = next(p for p in self.get_projects() if p[\"id\"] == project_id\n or p[\"name\"] == project_name)\n # FIXME: use namedtuple instead? create a self.project = dict()?\n self.project_name = project_found[\"name\"]\n self.project_id = project_found[\"id\"]\n self.project_address = \"projects/%s/\" % self.project_id\n except StopIteration:\n logger.error(\"Project %s not found\" % project)\n raise KeyError", "def project(self) -> aws_cdk.aws_codebuild.IProject:\n return jsii.get(self, \"project\")", "def get_project(name):\n tx = cypher_transaction()\n query = \"\"\"MATCH (n:project) WHERE n.name={project_name} RETURN n\"\"\"\n tx.append(query, parameters={'project_name': name})\n result = tx.commit()\n\n # Returns a result of the form [[\n # Record(\n # columns=('n',),\n # values=(Node('http://localhost:7474/db/data/node/233'),)\n # )\n # ]]\n return _first(result)[0].values[0]", "def find_project_for_story(story_id):\r\n\r\n for project in Project.all():\r\n story = project.load_story(story_id)\r\n if story is not None:\r\n return project\r\n\r\n #Not found\r\n print \"No project found for story: #{}\".format(story_id)\r\n return None" ]
[ "0.70699006", "0.68557197", "0.66254395", "0.64616627", "0.63413024", "0.62891513", "0.62088645", "0.61985654", "0.612253", "0.60614514", "0.60256624", "0.6006625", "0.5999078", "0.59616834", "0.5941112", "0.5911068", "0.5911068", "0.59038454", "0.5900037", "0.5870759", "0.58625996", "0.58564776", "0.5852726", "0.5851352", "0.584494", "0.58329433", "0.58195937", "0.58105063", "0.57972395", "0.5792592", "0.5792147", "0.57807964", "0.57752806", "0.5763392", "0.57580507", "0.5752337", "0.5742942", "0.5728546", "0.57031095", "0.56890815", "0.56865084", "0.5682047", "0.5672753", "0.5652602", "0.56516093", "0.56466156", "0.5643029", "0.56364036", "0.56364036", "0.56364036", "0.56246114", "0.5620364", "0.5579865", "0.5549485", "0.55437255", "0.55228925", "0.55041796", "0.5475201", "0.5472807", "0.54677176", "0.5465815", "0.54622006", "0.5451212", "0.5449375", "0.5448989", "0.5447617", "0.5446086", "0.5446086", "0.5438718", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5429608", "0.5420862", "0.5419421", "0.5419175", "0.5418224", "0.5414242", "0.5400886", "0.53998655", "0.5395399", "0.53933966", "0.5387914", "0.5379477" ]
0.7359438
0
Should write to the console using a write_source function call on the internal step report's stdout_interceptor.
def test_write_to_console(self, _step: PropertyMock):
        trials = [2, True, None, 'This is a test', b'hello']

        for message in trials:
            _step_mock = MagicMock()
            write_source = MagicMock()
            _step_mock.report.stdout_interceptor.write_source = write_source
            _step.return_value = _step_mock
            step = exposed.ExposedStep()
            step.write_to_console(message)

            args, kwargs = write_source.call_args
            self.assertEqual('{}'.format(message), args[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_render_to_console(self, _step: PropertyMock):\n message = ' {{ a }} is not {{ b }}.'\n\n _step_mock = MagicMock()\n write_source = MagicMock()\n _step_mock.report.stdout_interceptor.write_source = write_source\n _step.return_value = _step_mock\n step = exposed.ExposedStep()\n step.render_to_console(message, a=7, b='happy')\n\n args, kwargs = write_source.call_args\n self.assertEqual('7 is not happy.', args[0])", "def stdout(self):\n pass", "def test_PrintSmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage._Print('hi there')\n self.AssertOutputContainsLine('hi there', check_stderr=True)", "def setUp(self):\n self.actualstdout = sys.stdout\n sys.stdout = StringIO.StringIO()", "def setUp(self):\n\t\tself.output = self.switchstdout()", "def enable(self):\n self.out = StringIO()\n self._stdout = sys.stdout\n sys.stdout = self.out", "def p(self):\n self.printstdout = True", "def javaScriptConsoleMessage(self, message, line_number, source_id):\n print 'Console:', message, line_number, source_id", "def print_cmd_line(s, target, src, env):\n sys.stdout.write(\" Making %s...\\n\"% (' and '.join([str(x) for x in target])))", "def capture_stdout(sq, method):\n capture = io.StringIO()\n sys.stdout = capture\n if method == \"print\":\n print(sq)\n else:\n sq.display()\n sys.stdout = sys.__stdout__\n return capture", "def test_capture_stdout():\n\n sys.stdout.write('Print to stdout')\n\n assert False", "def log_stdout(self, function):\n return function()", "def stdout(self):\n if not hasattr(self, \"my_stdout_proxy\"):\n self.my_stdout_proxy = self.outfile_proxy()\n self.my_stdout_proxy_created = 1\n return self.my_stdout_proxy", "def test_export_custom(self): # pylint: disable=no-self-use\n mock_record_str = Mock(str)\n\n def formatter(record): # pylint: disable=unused-argument\n return mock_record_str\n\n mock_stdout = Mock()\n exporter = ConsoleLogExporter(out=mock_stdout, formatter=formatter)\n log_data = LogData(\n log_record=LogRecord(),\n instrumentation_scope=InstrumentationScope(\n \"first_name\", \"first_version\"\n ),\n )\n exporter.export([log_data])\n mock_stdout.write.assert_called_once_with(mock_record_str)", "def hook_print():\n sys.stdout = PrintHook()", "def write_line(self, line):\n # TODO(iannucci): have step_runner log the step metadata as a protobuf\n # and/or put it in the Step proto message.\n return self.logging.write_line(line)", "def test_stdout_log(self, logger: Logger) -> None:\n task = OctaveTask()\n task.session_id = \"123\"\n handler = OutputHandler(task)\n logger.addHandler(handler)\n\n # Write something to the log\n msg = \"I am a message\"\n logger.info(msg)\n\n assert len(handler.contents) == 1\n assert handler.messages() == msg", "def test_stdout(self):\n stdout = StringIO()\n self.patch(sys, 'stdout', stdout)\n\n # Suppress warnings so that if there are any old-style plugins that\n # lore queries for don't confuse the assertion below. 
See #3070.\n self.patch(warnings, 'warn', lambda *a, **kw: None)\n self.test_buildTeX()\n self.assertEqual(stdout.getvalue(), '')", "def redirect_stdout():\n save_stdout = sys.stdout\n sys.stdout = _TQDMFile(sys.stdout)\n yield\n sys.stdout = save_stdout", "def print_out():\n pass", "def stdout2Log(self):\n sys.stdout = self\n sys.stderr = self\n return", "def do_print(self, line):\n cmd_args = io.parse_cmd_args(line, io.output_cmd_pattern)\n if cmd_args:\n success = self.manager.print_to_console(\n cmd_args.get('target'), \n cmd_args.get('filters')\n )\n if success:\n self.console_print(\"There, you asked for it!\", settings.INFO_FORMAT)\n else:\n self.console_print(\"Sorry, something kinda went wrong! You can try again.\", settings.ERROR_FORMAT)\n else:\n self.console_print(settings.COMMMAND_ARGS_ERROR_MSG, settings.ERROR_FORMAT)", "def test_PrintLoudlySmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage._PrintLoudly('hi there')\n self.AssertOutputContainsLine(r'\\*{10}', check_stderr=True)\n self.AssertOutputContainsLine('hi there', check_stderr=True)", "def tprint(self, cmd, end='\\n'):\n if ENABLE_DEBUG:\n stackIndex = 0\n for index, stackFrame in enumerate(stack()):\n caller = getframeinfo(stackFrame[0])\n if caller.filename == fullPath:\n stackIndex = index\n break \n caller = getframeinfo(stack()[stackIndex][0])\n self.fileHandle.write(\"# \" + targetFile + \":\" + str(caller.lineno) + '\\n')\n self.tprint_raw(cmd, end)", "def test_debug_output(self):\n assert output(self.msg) is not None", "def write_output(self):", "def dumps(self) -> str:\n code_file_path = os.path.join(\n self.project.source_directory,\n self.filename\n )\n code = dict(\n filename=self.filename,\n path=code_file_path,\n code=render.code_file(code_file_path)\n )\n\n if not self.is_running:\n # If no longer running, make sure to flush the stdout buffer so\n # any print statements at the end of the step get included in\n # the body\n self.report.flush_stdout()\n\n # Create a copy of the body for dumping\n body = self.report.body[:]\n\n if self.is_running:\n # If still running add a temporary copy of anything not flushed\n # from the stdout buffer to the copy of the body for display. 
Do\n # not flush the buffer though until the step is done running or\n # it gets flushed by another display call.\n body.append(self.report.read_stdout())\n\n body = ''.join(body)\n\n has_body = len(body) > 0 and (\n body.find('<div') != -1 or\n body.find('<span') != -1 or\n body.find('<p') != -1 or\n body.find('<pre') != -1 or\n body.find('<h') != -1 or\n body.find('<ol') != -1 or\n body.find('<ul') != -1 or\n body.find('<li') != -1\n )\n\n std_err = (\n self.report.read_stderr()\n if self.is_running else\n self.report.flush_stderr()\n ).strip('\\n').rstrip()\n\n dom = templating.render_template(\n 'step-body.html',\n last_display_update=self.report.last_update_time,\n elapsed_time=self.get_elapsed_timestamp(),\n code=code,\n body=body,\n has_body=has_body,\n id=self.definition.name,\n title=self.report.title,\n subtitle=self.report.subtitle,\n summary=self.report.summary,\n error=self.error,\n index=self.index,\n is_running=self.is_running,\n progress_message=self.progress_message,\n progress=int(round(max(0, min(100, 100 * self.progress)))),\n sub_progress_message=self.sub_progress_message,\n sub_progress=int(round(max(0, min(100, 100 * self.sub_progress)))),\n std_err=std_err\n )\n\n if not self.is_running:\n self.dom = dom\n return dom", "def testStdoutReadDuringCapture(self):\n with self.OutputCapturer():\n print('foo')\n self.AssertOutputContainsLine('foo')\n print('bar')\n self.AssertOutputContainsLine('bar')\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar')", "def javaScriptConsoleMessage(self, message, line, source):\n\n super(GRobotWebPage, self).javaScriptConsoleMessage(message, line,\n source)\n log_type = \"error\" if \"Error\" in message else \"info\"\n getattr(logger, log_type)(\"%s(%d): %s\" % (source or '<unknown>', line, message))", "def test_main_output(self, capsys):\n args = self.args.copy()\n args[\"out_file\"] = \"text.txt\"\n UI.main(**args)\n captured = capsys.readouterr().out\n assert \"Results written to text.txt\" in captured", "def _print_source(f):\n\n @_wraps(f)\n def wrapper(*args, **kwargs):\n source = _getsource(f)\n print(_clean_source(source))\n return f(*args, **kwargs)\n\n return wrapper", "def javaScriptConsoleMessage(\n self, level, message, line_number, source_id\n ):\n logger.debug(\n \"[WebEngine] level=%s %s:%s %s\",\n level, source_id, line_number, message\n )", "def render_entry_log(self):\n self.render_log(self.selenium_testcase_entry_template)", "def test_write_to_console_fail(self, _step: PropertyMock):\n _step.return_value = None\n step = exposed.ExposedStep()\n with self.assertRaises(ValueError):\n step.write_to_console('hello')", "def logOutput(self, line):\r\n self.writeToLog('output', line)", "def test_export(self): # pylint: disable=no-self-use\n log_data = LogData(\n log_record=LogRecord(\n timestamp=int(time.time() * 1e9),\n trace_id=2604504634922341076776623263868986797,\n span_id=5213367945872657620,\n trace_flags=TraceFlags(0x01),\n severity_text=\"WARN\",\n severity_number=SeverityNumber.WARN,\n body=\"Zhengzhou, We have a heaviest rains in 1000 years\",\n resource=SDKResource({\"key\": \"value\"}),\n attributes={\"a\": 1, \"b\": \"c\"},\n ),\n instrumentation_scope=InstrumentationScope(\n \"first_name\", \"first_version\"\n ),\n )\n exporter = ConsoleLogExporter()\n # Mocking stdout interferes with debugging and test reporting, mock on\n # the exporter instance instead.\n\n with patch.object(exporter, \"out\") as mock_stdout:\n exporter.export([log_data])\n 
mock_stdout.write.assert_called_once_with(\n log_data.log_record.to_json() + os.linesep\n )\n\n self.assertEqual(mock_stdout.write.call_count, 1)\n self.assertEqual(mock_stdout.flush.call_count, 1)", "def __exit__(self, exc_type, exc_val, exc_tb):\n sys.stdout.flush()\n sys.stdout.close()\n sys.stdout = sys.__stdout__", "def stdio(self):\n\n if isinstance(self.log_file, TotalLogFile):\n self.stdio_stolen = True\n self.log_file.stdio()", "async def console_writer(payload: ConsumerPayload):\n print(f\"console writer: {payload}\")", "def redirect_stdout(new_target=None):\n\n if not new_target:\n new_target = StringIO()\n\n _ = sys.stdout\n try:\n sys.stdout = new_target\n yield new_target\n finally:\n sys.stdout = _", "def debug(self, *args):\n\n if self.is_on(_Log.DEBUG):\n self._write(self._out, *args)", "def result(self, step):\n indent_extra = 0\n if self.current_rule:\n indent_extra = self.indent_size\n\n step = self.steps.pop(0)\n indent = make_indentation(2 * self.indent_size + indent_extra)\n if self.show_aligned_keywords:\n # -- RIGHT-ALIGN KEYWORDS (max. keyword width: 6):\n text = u\"%s%6s %s ... \" % (indent, step.keyword, step.name)\n else:\n text = u\"%s%s %s ... \" % (indent, step.keyword, step.name)\n self.stream.write(text)\n\n status_text = step.status.name\n if self.show_timings:\n status_text += \" in %0.3fs\" % step.duration\n\n unicode_errors = 0\n if step.error_message:\n try:\n self.stream.write(u\"%s\\n%s\\n\" % (status_text, step.error_message))\n except UnicodeError as e:\n unicode_errors += 1\n self.stream.write(u\"%s\\n\" % status_text)\n self.stream.write(u\"%s while writing error message: %s\\n\" % \\\n (e.__class__.__name__, e))\n if self.RAISE_OUTPUT_ERRORS:\n raise\n else:\n self.stream.write(u\"%s\\n\" % status_text)\n\n if self.show_multiline:\n if step.text:\n try:\n self.doc_string(step.text)\n except UnicodeError as e:\n unicode_errors += 1\n self.stream.write(u\"%s while writing docstring: %s\\n\" % \\\n (e.__class__.__name__, e))\n if self.RAISE_OUTPUT_ERRORS:\n raise\n if step.table:\n self.table(step.table)", "def test_print_end(self):\n response = support.create_project(self, 'madison')\n self.assertFalse(\n response.failed,\n Message('should have created project', response=response)\n )\n\n print_string = string.ascii_lowercase\n\n code = '\\n'.join([\n 'import cauldron as cd',\n 'cd.display.text(\"Hello World\")',\n 'print(\"{}\")'.format(print_string)\n ])\n\n support.add_step(self, contents=code)\n\n response = support.run_command('run -f')\n self.assertFalse(\n response.failed,\n Message('should have run step', response=response)\n )\n\n project = cauldron.project.get_internal_project()\n dom = project.steps[1].dom # type: str\n\n self.assertEqual(\n dom.count(print_string),\n 2,\n 'should have printed ascii lowercase'\n )", "def PrintLogs(self) -> None:\n assert self.Finished()\n for f, stream_name in (\n (self.stdout, \"STDOUT\"), (self.stderr, \"STDERR\")):\n f.flush()\n f.seek(0)\n # Since we collected binary data, we have to write binary data.\n encoded = (stream_name.encode(), str(self).encode())\n sys.stdout.buffer.write(b\"BEGIN %s of test %s\\n\" % encoded)\n sys.stdout.buffer.write(f.read())\n sys.stdout.buffer.write(b\"END %s of test %s\\n\" % encoded)\n sys.stdout.buffer.flush()", "def switchstdout(self):\n\t\tself.old_stdout = sys.stdout\n\t\tsys.stdout = result = StringIO()\n\t\treturn result", "def log(self, *args):\n self.log_stdout(*args)\n print(*args, file=self.general_log_file.file)\n self.general_log_file.flush()", "def 
read_stdout(self, dt):\n\n self.temp_stdout += self.temp_output\n self.ids[\"txt_code_output\"].text = self.temp_output", "def on_line(self, stream_name, line):\n if stream_name == 'stdout':\n self.stdout.write(line)\n elif stream_name == 'stderr':\n self.stderr.write(line)", "def log_step(step: int, message: str, stdout: bool = True) -> None:\n log(f\"Step {step:6d}: {message}\", stdout=stdout)", "def testStdoutAndStderr(self):\n with self.OutputCapturer():\n print('foo')\n print('bar', file=sys.stderr)\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)", "def on_begin(self, args, kwargs):\n self.stdout = open(self.stdout_path, \"wb\")\n self.stderr = open(self.stderr_path, \"wb\")", "def recordStdout(self, test, output):\n if output:\n test = proto_test(test)\n self.stdout_output[test] = output", "def save_on_host(\n self, host_outputs: Any, writer: SummaryWriter, step: int\n ) -> None:\n raise NotImplementedError", "def OnSim42RunCmdFileDump(self, event):\n path = self.PromptPathOpenCmd()\n if not path: return\n pathOut = self.PromptPathSaveCmd()\n if not pathOut: return\n f = open(pathOut, 'w')\n oldOut = self.sim42interp.cmd.output\n oldOutSys = sys.stdout\n self.sim42interp.cmd.output = f\n sys.stdout = f\n self.IgnoreMessages()\n self.RunCmdFile(path)\n self.UnIgnoreMessages()\n f.close()\n self.sim42interp.cmd.output = oldOut\n sys.stdout = oldOutSys", "def stdout(self):\n if self.dm.fileExists(self.proc):\n try:\n t = self.dm.pullFile(self.proc)\n except DMError:\n # we currently don't retry properly in the pullFile\n # function in dmSUT, so an error here is not necessarily\n # the end of the world\n return ''\n newLogContent = t[self.stdoutlen:]\n self.stdoutlen = len(t)\n # Match the test filepath from the last TEST-START line found in the new\n # log content. 
These lines are in the form:\n # 1234 INFO TEST-START | /filepath/we/wish/to/capture.html\\n\n testStartFilenames = re.findall(r\"TEST-START \\| ([^\\s]*)\", newLogContent)\n if testStartFilenames:\n self.lastTestSeen = testStartFilenames[-1]\n return newLogContent.strip('\\n').strip()\n else:\n return ''", "def pytest_logger_stdoutloggers(self, item):", "def render_exit_log(self):\n self.render_log(self.selenium_testcase_exit_template)", "def addSource(self, source):\n self.tprint('source ' + source)", "def write(self, text: str) -> None:\r\n if len(text) == 0: # workaround for a bug in VSCode debugger: sys.stdout.write(''); sys.stdout.flush() => crash\r\n return\r\n\r\n if self.file is not None:\r\n self.file.write(text)\r\n\r\n self.stdout.write(text)\r\n\r\n if self.should_flush:\r\n self.flush()", "def real_print(*args, **kwargs):\n\n kwargs.setdefault('file', real_stdout)\n _python_print_function(*args, **kwargs)", "def test_print_start(self):\n response = support.create_project(self, 'chicago')\n self.assertFalse(\n response.failed,\n Message('should have created project', response=response)\n )\n\n print_string = string.ascii_lowercase\n\n code = '\\n'.join([\n 'import cauldron as cd',\n 'print(\"{}\")'.format(print_string),\n 'cd.display.text(\"Hello World\")'\n ])\n\n support.add_step(self, contents=code)\n\n response = support.run_command('run -f')\n self.assertFalse(\n response.failed,\n Message('should have run step', response=response)\n )\n\n project = cauldron.project.get_internal_project()\n dom = project.steps[1].dom # type: str\n\n self.assertEqual(\n dom.count(print_string),\n 2,\n 'should have printed ascii lowercase'\n )", "def printOutput(self):\n pass", "def do_write_and_execute(self, arg):\n self._print_func_result(self.phil.write_and_execute, arg)", "def print_outcome(self) -> None:\n pass", "def test_capture_stdout_works_with_print(self):\n with debug_env:\n with captured_stdout() as stdout:\n print(\"wibble\")\n\n self.assertIn(\"wibble\", stdout.getvalue())", "def logStarted(build, step, log):", "def stdout_path(self):\n return self.log_path\n # return self.path / 'stdout.txt'", "def test_output(self):\n work_logs = [WorkLog(\"MYB-7\", datetime(2020, 1, 20), 3600, \"René Doe\"),\n WorkLog(\"MYB-5\", datetime(2020, 1, 18), 3600, \"John Doe\"),\n WorkLog(\"MYB-5\", datetime(2020, 1, 18), 5400, \"John Doe\"),\n WorkLog(\"MYB-5\", datetime(2020, 1, 12), 3600, \"John Doe\")]\n\n issue_myb_5 = Issue(10005, \"MYB-5\", \"Summary of issue MYB-5\", \"MYB-3\", \"Summary of the parent issue of MYB-5\", 3600, 900, datetime(2020, 1, 15))\n issue_myb_5.issue_start_date = datetime(2020, 1, 10)\n issue_myb_7 = Issue(10007, \"MYB-7\", \"Summary of issue MYB-7\", None, None, None, None, None)\n\n issues = [issue_myb_5,\n issue_myb_7]\n\n stdout = sys.stdout\n with open('jira-time-report-console.txt', 'w') as sys.stdout:\n jiratimereport.process_work_logs(\"console\", issues, work_logs)\n sys.stdout = stdout\n self.assertTrue(filecmp.cmp('console_output.txt', 'jira-time-report-console.txt'))\n\n jiratimereport.process_work_logs(\"csv\", issues, work_logs)\n self.assertTrue(filecmp.cmp('csv_output.csv', 'jira-time-report.csv'))\n\n jiratimereport.process_work_logs(\"excel\", issues, work_logs)\n expected_excel = pd.read_excel('excel_output.xlsx')\n actual_excel = pd.read_excel('jira-time-report.xlsx')\n self.assertTrue(expected_excel.equals(actual_excel))", "def before_all(context):\n def run_next_action():\n \"\"\"Run Next-action and return both stderr and stdout.\"\"\"\n 
os.environ[\"BROWSER\"] = 'echo %s'\n result = subprocess.run(context.arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding=\"utf-8\")\n return result.stdout + result.stderr\n\n context.next_action = run_next_action\n subprocess.run([\"coverage\", \"erase\"])", "def trace(self, out):\n if self.step == 0:\n out.write(\"# %5s %16s %8s %8s %7s\\n\" \\\n % ('Step', 'Current energy', 'Av shift',\n 'Mx shift', 'Funcs'))\n log = \"%7d %16.5f %8.4f %8.4f %7d\\n\" \\\n % (self.step, self.current_e, self.shiftavr,\n self.shiftmax, self.funcs)\n out.write(log)", "def write(self,s):\n if self.passthrough:\n sys.stdout=sys.__stdout__\n print s\n sys.stdout=sys.self\n return \n cb=self._determine_context_buffer(s)\n cb.write(s)\n self.dump()", "def write(self, args, gen, out=sys.stdout):\n seqsum = sniff(gen)\n\n out.write(str(seqsum))", "def report(self, obj, message, linenum, char_offset=0):\n self.controller.report(linenumber=linenum, filename=obj.path,\n severity=self.severity, message=message,\n rulename = self.__class__.__name__,\n char=char_offset)", "def test_print_solo(self):\n response = support.create_project(self, 'minneapolis')\n self.assertFalse(\n response.failed,\n Message('should have created project', response=response)\n )\n\n print_string = string.ascii_lowercase\n\n code = '\\n'.join([\n 'values = [x ** 2 for x in range(100)]',\n 'print(\"{}\")'.format(print_string)\n ])\n\n support.add_step(self, contents=code)\n\n response = support.run_command('run -f')\n self.assertFalse(\n response.failed,\n Message('should have run step', response=response)\n )\n\n project = cauldron.project.get_internal_project()\n dom = project.steps[1].dom # type: str\n\n self.assertEqual(\n dom.count(print_string),\n 2,\n 'should have printed ascii lowercase'\n )", "def _output(self, message, verbosity, exact, stream):\n if exact:\n if self.config.verbosity == verbosity:\n stream.write(message + \"\\n\")\n else:\n if self.config.verbosity >= verbosity:\n stream.write(message + \"\\n\")", "def write_to_runner(line, log_file=None, check_status=False):\n log_line = \"\"\n if log_file is not None:\n log_line = f\" &> {log_file} 2>&1\"\n line += log_line\n line += \"\\n\"\n f_run.write(line)\n if check_status:\n f_run.write(\"\\nReturnValue=$?\\n\")\n f_run.write(\"if [[ $ReturnValue != 0 ]]; then\\n\")\n f_run.write(\" echo \\\"Encountered error with command: '\")\n f_run.write(line.replace(log_line, \"\").replace(\"\\\"\", \"\\\\\\\"\").strip())\n f_run.write(\"'\\\"\\n\")\n if log_file is not None:\n f_run.write(\n f\" echo \\\"Check log: '{log_file.strip()}'\\\"\\n\")\n f_run.write(\" exit $ReturnValue\\n\")\n f_run.write(\"fi\\n\")", "def test_03_pass_print(self):\n print('Hello World!')", "def do_inspect_with_source(self, arg):\n self._do_inspect(arg, with_source=True)", "def testRunSmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage.Run()", "def run_report_generation(**kwargs):\n out = run_python_script_helper(\n os.path.dirname(__file__), \"report_generation_example.py\", **kwargs\n )\n return out", "def get_stdout(self):\n return self._get_log('stdout')", "def test_get_source_log(self):\n pass", "def Write(self):\n if self._project_definition.name in self._PROJECTS_WITH_PYTHON3_AS_DEFAULT:\n shebang = '#!/usr/bin/env python3'\n else:\n shebang = '#!/usr/bin/env python'\n\n template_mappings = {\n 'project_name': self._project_definition.name,\n 'shebang': shebang,\n }\n\n if self._project_definition.name == 'plaso':\n template_file = 
'check_dependencies-with_url.py'\n else:\n template_file = 'check_dependencies.py'\n\n template_file = os.path.join(\n self._l2tdevtools_path, self._TEMPLATE_DIRECTORY, template_file)\n file_content = self._GenerateFromTemplate(template_file, template_mappings)\n\n with io.open(self.PATH, 'w', encoding='utf-8') as file_object:\n file_object.write(file_content)", "def write(self, line):\n if self._stdout:\n print(line, end=\"\")\n else:\n self._file_descriptor.write(line)", "def log(self, event):\n\n log_message = '{} - {} file: {}'.format(\n datetime.now().strftime('%Y-%m-%d %H:%M:%S'),\n event.event_type.capitalize(),\n event.src_path\n )\n\n if hasattr(event, 'dest_path'):\n log_message += ' => {}'.format(event.dest_path)\n\n sys.stdout.write(log_message + '\\n')\n sys.stdout.flush()", "def __init__(self):\n super(StdoutWriter, self).__init__()", "def main(source_file_path, verbose):\n setup_logging(verbose=verbose)\n print(find_test_file(source_file_path))", "def print_report_text(self, stream, time_taken, out, err):\n # stream.write('<testsuite errors=\"%(e)d\" failures=\"%(f)d\" ' % \\\n # { \"e\": len(self.errors), \"f\": len(self.failures) })\n # stream.write('name=\"%(n)s\" tests=\"%(t)d\" time=\"%(time).3f\">\\n' % \\\n # {\n # \"n\": self._test_name,\n # \"t\": self.testsRun,\n # \"time\": time_taken,\n # })\n for info in self._tests:\n info.print_report_text(stream)", "def _print_callback(sample):\n\n print sample", "def report_bug(self, line):\n # Store the output of our command\n with capture_output() as shell_output:\n self.shell.run_cell(line)\n # Show the capture output to the user\n shell_output.show()\n # Create the report\n report_bug(prior_commands=1, captured_output=shell_output.stdout)", "def reporter(self, lnum, col, text, check):\n self.output.append([lnum, col, text])", "def report_coverage(fp=None, details=False):\n if fp is None:\n fp = sys.stdout\n fp.write(get_coverage_report(details))", "def log_output(self, output):\n if self.logfile:\n click.echo(utf8tounicode(output), file=self.logfile)", "def emit(self, src_record):\n # transform\n dst_record = self.transform \\\n (copy.copy(src_record))\n\n # emit to console\n logging.StreamHandler.emit \\\n (self, dst_record)", "def _write_source(self, lines: List[str], indents: Sequence[str] = ()) -> None:\n if indents and len(indents) != len(lines):\n raise ValueError(\n \"indents size ({}) should have same size as lines ({})\".format(\n len(indents), len(lines)\n )\n )\n if not indents:\n indents = [\"\"] * len(lines)\n source = \"\\n\".join(lines)\n new_lines = self._highlight(source).splitlines()\n for indent, new_line in zip(indents, new_lines):\n self.line(indent + new_line)", "def collect_output(self):\n pass", "def collect_output(self):\n pass", "def log_stdout(self, *args, **kwargs):\n\n do_print = False\n if 'level' in kwargs:\n if kwargs['level'] >= self.log_level:\n do_print = True\n else:\n do_print = False\n else:\n do_print = True\n\n if do_print:\n print(*args, file=sys.stdout)\n sys.stdout.flush()", "def report(self, output_dir):", "def writeOutput(self):\n\n self.collect.writeOutput()" ]
[ "0.7060722", "0.6297328", "0.62191683", "0.6036808", "0.59491473", "0.5853387", "0.58372104", "0.5825977", "0.5810231", "0.5803809", "0.5773342", "0.57336247", "0.5707204", "0.5692095", "0.56633615", "0.5617709", "0.5614963", "0.55947864", "0.55731905", "0.5567739", "0.55644125", "0.55444604", "0.5526443", "0.54957384", "0.5490355", "0.54782814", "0.5461037", "0.5456289", "0.5430993", "0.5425914", "0.5422019", "0.5414985", "0.5398086", "0.5391726", "0.5373806", "0.53590286", "0.53554434", "0.5352086", "0.5322487", "0.5311027", "0.5302629", "0.529754", "0.52938455", "0.52923834", "0.52839285", "0.52634215", "0.5252254", "0.5249231", "0.52463794", "0.5243776", "0.5242703", "0.5239692", "0.5234702", "0.5220071", "0.5218207", "0.5217713", "0.5214816", "0.52142173", "0.5206002", "0.5182827", "0.5181323", "0.51763654", "0.5170797", "0.51694673", "0.5165652", "0.51597446", "0.51562446", "0.5153721", "0.5153439", "0.5148408", "0.51464194", "0.51457", "0.5144079", "0.514243", "0.5140744", "0.5137169", "0.5135063", "0.5129249", "0.51262903", "0.5124716", "0.512043", "0.5119351", "0.51157796", "0.51125926", "0.5112189", "0.5111385", "0.5107077", "0.5104793", "0.51041085", "0.5097756", "0.5092678", "0.5086554", "0.50834584", "0.5082463", "0.507943", "0.5073557", "0.5073557", "0.507335", "0.50723875", "0.50718683" ]
0.7509089
0
Should render to the console using a write_source function call on the internal step report's stdout_interceptor.
def test_render_to_console(self, _step: PropertyMock):
        message = ' {{ a }} is not {{ b }}.'

        _step_mock = MagicMock()
        write_source = MagicMock()
        _step_mock.report.stdout_interceptor.write_source = write_source
        _step.return_value = _step_mock
        step = exposed.ExposedStep()
        step.render_to_console(message, a=7, b='happy')

        args, kwargs = write_source.call_args
        self.assertEqual('7 is not happy.', args[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_write_to_console(self, _step: PropertyMock):\n trials = [2, True, None, 'This is a test', b'hello']\n\n for message in trials:\n _step_mock = MagicMock()\n write_source = MagicMock()\n _step_mock.report.stdout_interceptor.write_source = write_source\n _step.return_value = _step_mock\n step = exposed.ExposedStep()\n step.write_to_console(message)\n\n args, kwargs = write_source.call_args\n self.assertEqual('{}'.format(message), args[0])", "def stdout(self):\n pass", "def render_entry_log(self):\n self.render_log(self.selenium_testcase_entry_template)", "def test_PrintSmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage._Print('hi there')\n self.AssertOutputContainsLine('hi there', check_stderr=True)", "def enable(self):\n self.out = StringIO()\n self._stdout = sys.stdout\n sys.stdout = self.out", "def setUp(self):\n\t\tself.output = self.switchstdout()", "def setUp(self):\n self.actualstdout = sys.stdout\n sys.stdout = StringIO.StringIO()", "def capture_stdout(sq, method):\n capture = io.StringIO()\n sys.stdout = capture\n if method == \"print\":\n print(sq)\n else:\n sq.display()\n sys.stdout = sys.__stdout__\n return capture", "def render_exit_log(self):\n self.render_log(self.selenium_testcase_exit_template)", "def log_stdout(self, function):\n return function()", "def hook_print():\n sys.stdout = PrintHook()", "def p(self):\n self.printstdout = True", "def test_stdout(self):\n stdout = StringIO()\n self.patch(sys, 'stdout', stdout)\n\n # Suppress warnings so that if there are any old-style plugins that\n # lore queries for don't confuse the assertion below. See #3070.\n self.patch(warnings, 'warn', lambda *a, **kw: None)\n self.test_buildTeX()\n self.assertEqual(stdout.getvalue(), '')", "def dumps(self) -> str:\n code_file_path = os.path.join(\n self.project.source_directory,\n self.filename\n )\n code = dict(\n filename=self.filename,\n path=code_file_path,\n code=render.code_file(code_file_path)\n )\n\n if not self.is_running:\n # If no longer running, make sure to flush the stdout buffer so\n # any print statements at the end of the step get included in\n # the body\n self.report.flush_stdout()\n\n # Create a copy of the body for dumping\n body = self.report.body[:]\n\n if self.is_running:\n # If still running add a temporary copy of anything not flushed\n # from the stdout buffer to the copy of the body for display. 
Do\n # not flush the buffer though until the step is done running or\n # it gets flushed by another display call.\n body.append(self.report.read_stdout())\n\n body = ''.join(body)\n\n has_body = len(body) > 0 and (\n body.find('<div') != -1 or\n body.find('<span') != -1 or\n body.find('<p') != -1 or\n body.find('<pre') != -1 or\n body.find('<h') != -1 or\n body.find('<ol') != -1 or\n body.find('<ul') != -1 or\n body.find('<li') != -1\n )\n\n std_err = (\n self.report.read_stderr()\n if self.is_running else\n self.report.flush_stderr()\n ).strip('\\n').rstrip()\n\n dom = templating.render_template(\n 'step-body.html',\n last_display_update=self.report.last_update_time,\n elapsed_time=self.get_elapsed_timestamp(),\n code=code,\n body=body,\n has_body=has_body,\n id=self.definition.name,\n title=self.report.title,\n subtitle=self.report.subtitle,\n summary=self.report.summary,\n error=self.error,\n index=self.index,\n is_running=self.is_running,\n progress_message=self.progress_message,\n progress=int(round(max(0, min(100, 100 * self.progress)))),\n sub_progress_message=self.sub_progress_message,\n sub_progress=int(round(max(0, min(100, 100 * self.sub_progress)))),\n std_err=std_err\n )\n\n if not self.is_running:\n self.dom = dom\n return dom", "def stdout(self):\n if not hasattr(self, \"my_stdout_proxy\"):\n self.my_stdout_proxy = self.outfile_proxy()\n self.my_stdout_proxy_created = 1\n return self.my_stdout_proxy", "def javaScriptConsoleMessage(self, message, line_number, source_id):\n print 'Console:', message, line_number, source_id", "def __execute_reporter(self):\n if not self.__args.report:\n return\n reporter.HTMLReporter().generate_report_from_file(\n self.__lst_json_files)", "def test_PrintLoudlySmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage._PrintLoudly('hi there')\n self.AssertOutputContainsLine(r'\\*{10}', check_stderr=True)\n self.AssertOutputContainsLine('hi there', check_stderr=True)", "def _print_source(f):\n\n @_wraps(f)\n def wrapper(*args, **kwargs):\n source = _getsource(f)\n print(_clean_source(source))\n return f(*args, **kwargs)\n\n return wrapper", "def print_cmd_line(s, target, src, env):\n sys.stdout.write(\" Making %s...\\n\"% (' and '.join([str(x) for x in target])))", "def setup(self) -> \"None\":\n # Patch the renderer to extend the output height\n renderer._output_screen_diff = _patched_output_screen_diff\n\n if config.page and sys.stdout.isatty():\n # Use a temporary file as display output if we are going to page the output\n from tempfile import TemporaryFile\n\n self.out_file = TemporaryFile(\"w+\")\n\n else:\n if config.page:\n log.warning(\"Cannot page output because standard output is not a TTY\")\n # If we are not paging output, determine when to print it\n if config.dump_file is None or str(config.dump_file) in (\n \"-\",\n \"/dev/stdout\",\n ):\n self.out_file = sys.stdout\n elif str(config.dump_file) == \"/dev/stderr\":\n self.out_file = sys.stderr\n else:\n try:\n self.out_file = open(config.dump_file, \"w+\")\n except (\n FileNotFoundError,\n PermissionError,\n io.UnsupportedOperation,\n ) as error:\n log.error(error)\n log.error(\n f\"Output file `{config.dump_file}` cannot be opened. 
\"\n \"Standard output will be used.\"\n )\n self.out_file = sys.stdout\n\n # Ensure we do not recieve the \"Output is not a terminal\" message\n Vt100_Output._fds_not_a_terminal.add(self.out_file.fileno())\n # Do not use stderr instead of stdout if stdout is not a tty\n self.out_file = cast(\"TextIO\", self.out_file)\n self.output = create_output(self.out_file, always_prefer_tty=False)\n\n # Use the width and height of stderr (this gives us the terminal size even if\n # output is being piped to a non-tty)\n # if hasattr(self.output, '_get_size'):\n setattr(self.output, \"get_size\", create_output(stdout=sys.stderr).get_size)\n\n # Disable character position requests when dumping output to stop extra output\n # This also speeds things up as we do not need to wait for the response\n # Ignore typing here as mypy does not understand __class__\n class DumpingOutput(self.output.__class__): # type: ignore\n # Disable character position requests when dumping output\n responds_to_cpr = False\n\n # Patch the output to prevent CPR detection\n self.output.__class__ = DumpingOutput\n\n # Set pre-run commands\n self.pre_run.append(self.post_dump)", "def test_capture_stdout():\n\n sys.stdout.write('Print to stdout')\n\n assert False", "def do_print(self, line):\n cmd_args = io.parse_cmd_args(line, io.output_cmd_pattern)\n if cmd_args:\n success = self.manager.print_to_console(\n cmd_args.get('target'), \n cmd_args.get('filters')\n )\n if success:\n self.console_print(\"There, you asked for it!\", settings.INFO_FORMAT)\n else:\n self.console_print(\"Sorry, something kinda went wrong! You can try again.\", settings.ERROR_FORMAT)\n else:\n self.console_print(settings.COMMMAND_ARGS_ERROR_MSG, settings.ERROR_FORMAT)", "def render_log(self, template):\n\n # only write to the log file if it exists\n if self._selenium_log_file:\n\n id = self.id()\n description = self.shortDescription()\n\n # grab the stack frame info from test_* method\n (obj, filename, lineno, function, code_context, index) \\\n = self.get_test_frame()\n\n # render the test case debug\n html = render_to_string(\n template, {\n 'id': id,\n 'description': description,\n 'filename': filename,\n 'lineno': lineno,\n 'function': function,\n 'code_context': code_context,\n 'index': index,\n 'png': self.get_image_uri(),\n 'text': self.get_visible_text()})\n\n # write it to the file\n self._selenium_log_file.write(html.encode('utf8'))", "def test_export_custom(self): # pylint: disable=no-self-use\n mock_record_str = Mock(str)\n\n def formatter(record): # pylint: disable=unused-argument\n return mock_record_str\n\n mock_stdout = Mock()\n exporter = ConsoleLogExporter(out=mock_stdout, formatter=formatter)\n log_data = LogData(\n log_record=LogRecord(),\n instrumentation_scope=InstrumentationScope(\n \"first_name\", \"first_version\"\n ),\n )\n exporter.export([log_data])\n mock_stdout.write.assert_called_once_with(mock_record_str)", "def javaScriptConsoleMessage(self, message, line, source):\n\n super(GRobotWebPage, self).javaScriptConsoleMessage(message, line,\n source)\n log_type = \"error\" if \"Error\" in message else \"info\"\n getattr(logger, log_type)(\"%s(%d): %s\" % (source or '<unknown>', line, message))", "def test_debug_output(self):\n assert output(self.msg) is not None", "def redirect_stdout():\n save_stdout = sys.stdout\n sys.stdout = _TQDMFile(sys.stdout)\n yield\n sys.stdout = save_stdout", "def display_stdout_and_err_in_curr_cell(self):\n ipy_display(self.output_widget)", "def render_source(self, filename, obj):\n raise 
NotImplementedError()", "def javaScriptConsoleMessage(\n self, level, message, line_number, source_id\n ):\n logger.debug(\n \"[WebEngine] level=%s %s:%s %s\",\n level, source_id, line_number, message\n )", "def render(self):\n self.env.render()\n #input(\"Press enter to take a step \")", "def testStdoutReadDuringCapture(self):\n with self.OutputCapturer():\n print('foo')\n self.AssertOutputContainsLine('foo')\n print('bar')\n self.AssertOutputContainsLine('bar')\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar')", "def hook_exceptions():\n\n if hasattr(sys.stdout, \"fileno\"): # when testing, sys.stdout is StringIO\n # reopen stdout in non buffered mode\n sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)\n # set the hook\n sys.excepthook = traceback_formatter", "def print_out():\n pass", "def render(self, screen):\n pass", "def render(self, screen):\n pass", "def before_all(context):\n def run_next_action():\n \"\"\"Run Next-action and return both stderr and stdout.\"\"\"\n os.environ[\"BROWSER\"] = 'echo %s'\n result = subprocess.run(context.arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding=\"utf-8\")\n return result.stdout + result.stderr\n\n context.next_action = run_next_action\n subprocess.run([\"coverage\", \"erase\"])", "def define_inspect_outputs(self): # pragma: no cover\n func_sinker_dir = os.path.join(os.path.dirname(self.stage_dir), \"func_datasinker\")\n func_sinker_report = os.path.join(func_sinker_dir, \"_report\", \"report.rst\")\n\n if os.path.exists(func_sinker_report):\n\n func_outputs = get_pipeline_dictionary_outputs(\n func_sinker_report, self.output_dir\n )\n\n map_scale = \"default\"\n if self.config.log_visualization:\n map_scale = \"log\"\n\n if self.config.circular_layout:\n layout = \"circular\"\n else:\n layout = \"matrix\"\n\n mat = func_outputs[\"func.@connectivity_matrices\"]\n\n if isinstance(mat, str):\n if \"gpickle\" in mat:\n con_name = os.path.basename(mat).split(\".\")[0].split(\"_\")[-1]\n if os.path.exists(mat):\n self.inspect_outputs_dict[\n \"ROI-average time-series correlation - Connectome %s\"\n % os.path.basename(mat)\n ] = [\n \"showmatrix_gpickle\",\n layout,\n mat,\n \"corr\",\n \"False\",\n self.config.subject + \" - \" + con_name + \" - Correlation\",\n map_scale,\n ]\n else:\n for mat in func_outputs[\"func.@connectivity_matrices\"]:\n if \"gpickle\" in mat:\n con_name = os.path.basename(mat).split(\".\")[0].split(\"_\")[-1]\n if os.path.exists(mat):\n self.inspect_outputs_dict[\n \"ROI-average time-series correlation - Connectome %s\"\n % con_name\n ] = [\n \"showmatrix_gpickle\",\n layout,\n mat,\n \"corr\",\n \"False\",\n self.config.subject\n + \" - \"\n + con_name\n + \" - Correlation\",\n map_scale,\n ]\n\n self.inspect_outputs = sorted(\n [key for key in list(self.inspect_outputs_dict.keys())], key=str.lower\n )", "def write_output(self):", "def flush(self) -> None:\n if not self._buffer:\n # Only flush stdout buffer. (It could be that Python still has\n # something in its buffer. -- We want to be sure to print that in\n # the correct color.)\n self.stdout.flush()\n return\n\n data = \"\".join(self._buffer)\n\n if _DEBUG_RENDER_OUTPUT:\n self.LOG.write((\"%r\" % data).encode(\"utf-8\") + b\"\\n\")\n self.LOG.flush()\n\n # Print characters one by one. 
This appears to be the best solution\n # in order to avoid traces of vertical lines when the completion\n # menu disappears.\n for b in data:\n written = DWORD()\n\n retval = windll.kernel32.WriteConsoleW(\n self.hconsole, b, 1, byref(written), None\n )\n assert retval != 0\n\n self._buffer = []", "def test_main_output(self, capsys):\n args = self.args.copy()\n args[\"out_file\"] = \"text.txt\"\n UI.main(**args)\n captured = capsys.readouterr().out\n assert \"Results written to text.txt\" in captured", "def on_line(self, stream_name, line):\n if stream_name == 'stdout':\n self.stdout.write(line)\n elif stream_name == 'stderr':\n self.stderr.write(line)", "def report(self, obj, message, linenum, char_offset=0):\n self.controller.report(linenumber=linenum, filename=obj.path,\n severity=self.severity, message=message,\n rulename = self.__class__.__name__,\n char=char_offset)", "def render(self, template, *args, **kwargs):\n self._render(template, sys.stdout, *args, **kwargs)", "def switchstdout(self):\n\t\tself.old_stdout = sys.stdout\n\t\tsys.stdout = result = StringIO()\n\t\treturn result", "def update_reporting(options, is_console_task, run_tracker):\r\n\r\n # Get any output silently buffered in the old console reporter, and remove it.\r\n old_outfile = run_tracker.report.remove_reporter('capturing').settings.outfile\r\n old_outfile.flush()\r\n buffered_output = old_outfile.getvalue()\r\n old_outfile.close()\r\n\r\n log_level = Report.log_level_from_string(options.log_level or 'info')\r\n color = not options.no_color\r\n timing = options.time\r\n cache_stats = options.time # TODO: Separate flag for this?\r\n\r\n if options.quiet or is_console_task:\r\n console_reporter = QuietReporter(run_tracker,\r\n QuietReporter.Settings(log_level=log_level, color=color))\r\n else:\r\n # Set up the new console reporter.\r\n settings = PlainTextReporter.Settings(log_level=log_level, outfile=sys.stdout, color=color,\r\n indent=True, timing=timing, cache_stats=cache_stats)\r\n console_reporter = PlainTextReporter(run_tracker, settings)\r\n console_reporter.emit(buffered_output)\r\n console_reporter.flush()\r\n run_tracker.report.add_reporter('console', console_reporter)\r\n\r\n if options.logdir:\r\n # Also write plaintext logs to a file. 
This is completely separate from the html reports.\r\n safe_mkdir(options.logdir)\r\n run_id = run_tracker.run_info.get_info('id')\r\n outfile = open(os.path.join(options.logdir, '%s.log' % run_id), 'w')\r\n settings = PlainTextReporter.Settings(log_level=log_level, outfile=outfile, color=False,\r\n indent=True, timing=True, cache_stats=True)\r\n logfile_reporter = PlainTextReporter(run_tracker, settings)\r\n logfile_reporter.emit(buffered_output)\r\n logfile_reporter.flush()\r\n run_tracker.report.add_reporter('logfile', logfile_reporter)", "def real_print(*args, **kwargs):\n\n kwargs.setdefault('file', real_stdout)\n _python_print_function(*args, **kwargs)", "def test_render_stop_display(self, get_formatted_stack_frame: MagicMock):\n get_formatted_stack_frame.return_value = [\n {'filename': 'foo'},\n {'filename': 'bar'},\n {'filename': os.path.realpath(exposed.__file__)}\n ]\n step = MagicMock()\n exposed.render_stop_display(step, 'FAKE')\n self.assertEqual(1, step.report.append_body.call_count)", "def addSource(self, source):\n self.tprint('source ' + source)", "def showDebugSource(self, fn, line):\n if not fn.startswith('<'):\n self.openSourceFile(fn, line)\n self.setFileLine(fn, line)", "def tprint(self, cmd, end='\\n'):\n if ENABLE_DEBUG:\n stackIndex = 0\n for index, stackFrame in enumerate(stack()):\n caller = getframeinfo(stackFrame[0])\n if caller.filename == fullPath:\n stackIndex = index\n break \n caller = getframeinfo(stack()[stackIndex][0])\n self.fileHandle.write(\"# \" + targetFile + \":\" + str(caller.lineno) + '\\n')\n self.tprint_raw(cmd, end)", "def on_begin(self, args, kwargs):\n self.stdout = open(self.stdout_path, \"wb\")\n self.stderr = open(self.stderr_path, \"wb\")", "def do_inspect_with_source(self, arg):\n self._do_inspect(arg, with_source=True)", "def result(self, step):\n indent_extra = 0\n if self.current_rule:\n indent_extra = self.indent_size\n\n step = self.steps.pop(0)\n indent = make_indentation(2 * self.indent_size + indent_extra)\n if self.show_aligned_keywords:\n # -- RIGHT-ALIGN KEYWORDS (max. keyword width: 6):\n text = u\"%s%6s %s ... \" % (indent, step.keyword, step.name)\n else:\n text = u\"%s%s %s ... 
\" % (indent, step.keyword, step.name)\n self.stream.write(text)\n\n status_text = step.status.name\n if self.show_timings:\n status_text += \" in %0.3fs\" % step.duration\n\n unicode_errors = 0\n if step.error_message:\n try:\n self.stream.write(u\"%s\\n%s\\n\" % (status_text, step.error_message))\n except UnicodeError as e:\n unicode_errors += 1\n self.stream.write(u\"%s\\n\" % status_text)\n self.stream.write(u\"%s while writing error message: %s\\n\" % \\\n (e.__class__.__name__, e))\n if self.RAISE_OUTPUT_ERRORS:\n raise\n else:\n self.stream.write(u\"%s\\n\" % status_text)\n\n if self.show_multiline:\n if step.text:\n try:\n self.doc_string(step.text)\n except UnicodeError as e:\n unicode_errors += 1\n self.stream.write(u\"%s while writing docstring: %s\\n\" % \\\n (e.__class__.__name__, e))\n if self.RAISE_OUTPUT_ERRORS:\n raise\n if step.table:\n self.table(step.table)", "def test_write_to_console_fail(self, _step: PropertyMock):\n _step.return_value = None\n step = exposed.ExposedStep()\n with self.assertRaises(ValueError):\n step.write_to_console('hello')", "def display():\n\n # Check the pipe setup.\n check_pipe_setup(sequence=True, j=True)\n\n # Call the write method with sys.stdout as the file.\n write(file=sys.stdout)", "def __exit__(self, exc_type, exc_val, exc_tb):\n sys.stdout.flush()\n sys.stdout.close()\n sys.stdout = sys.__stdout__", "def dbtrace_show_output(trace_object, output_file):\n\n pass", "def report():\n pass", "def reporter(self, lnum, col, text, check):\n self.output.append([lnum, col, text])", "def test_print_end(self):\n response = support.create_project(self, 'madison')\n self.assertFalse(\n response.failed,\n Message('should have created project', response=response)\n )\n\n print_string = string.ascii_lowercase\n\n code = '\\n'.join([\n 'import cauldron as cd',\n 'cd.display.text(\"Hello World\")',\n 'print(\"{}\")'.format(print_string)\n ])\n\n support.add_step(self, contents=code)\n\n response = support.run_command('run -f')\n self.assertFalse(\n response.failed,\n Message('should have run step', response=response)\n )\n\n project = cauldron.project.get_internal_project()\n dom = project.steps[1].dom # type: str\n\n self.assertEqual(\n dom.count(print_string),\n 2,\n 'should have printed ascii lowercase'\n )", "def output_raw(self, string_to_output):\n html = plain_to_html(string_to_output)\n if html == \"\":\n return\n html_pre_output = html_pre(html)\n\n self._output_object.add_report(html_pre_output)", "def redirect_stdout(new_target=None):\n\n if not new_target:\n new_target = StringIO()\n\n _ = sys.stdout\n try:\n sys.stdout = new_target\n yield new_target\n finally:\n sys.stdout = _", "def run_report_generation(**kwargs):\n out = run_python_script_helper(\n os.path.dirname(__file__), \"report_generation_example.py\", **kwargs\n )\n return out", "def print_report(self, obj):\n return mark_safe(obj.report)", "def reports_cli():", "def stdout2Log(self):\n sys.stdout = self\n sys.stderr = self\n return", "def report_bug(self, line):\n # Store the output of our command\n with capture_output() as shell_output:\n self.shell.run_cell(line)\n # Show the capture output to the user\n shell_output.show()\n # Create the report\n report_bug(prior_commands=1, captured_output=shell_output.stdout)", "def testStdoutAndStderr(self):\n with self.OutputCapturer():\n print('foo')\n print('bar', file=sys.stderr)\n self.AssertOutputContainsLine('foo')\n self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)", "def read_stdout(self, dt):\n\n 
self.temp_stdout += self.temp_output\n self.ids[\"txt_code_output\"].text = self.temp_output", "def render(self):\n self.rendering = True\n self.env.render()", "def dspyRender(self):\n pass", "def test_capture_stdout_works_with_print(self):\n with debug_env:\n with captured_stdout() as stdout:\n print(\"wibble\")\n\n self.assertIn(\"wibble\", stdout.getvalue())", "def write_line(self, line):\n # TODO(iannucci): have step_runner log the step metadata as a protobuf\n # and/or put it in the Step proto message.\n return self.logging.write_line(line)", "def pytest_runtest_makereport(item, call): # pylint: disable=unused-argument\n pytest_html = item.config.pluginmanager.getplugin(\"html\")\n outcome = yield\n report = outcome.get_result()\n extra = getattr(report, \"extra\", [])\n driver_manager = DriverManager()\n xfail = hasattr(report, \"wasxfail\")\n\n if report.when == \"call\":\n extra.append(pytest_html.extras.url(driver_manager.driver.current_url))\n if (report.skipped and xfail) or (report.failed and not xfail):\n extra.append(pytest_html.extras.html(\"<div>Additional HTML</div>\"))\n screenshot = driver_manager.driver.get_screenshot_as_base64()\n extra.append(pytest_html.extras.image(screenshot, \"Screenshot\"))\n report.extra = extra", "def stdio(self):\n\n if isinstance(self.log_file, TotalLogFile):\n self.stdio_stolen = True\n self.log_file.stdio()", "def disable_print_statements_on_console(func):\n\n @wraps(func)\n def wrap(*args, **kw):\n suppress_text = io.StringIO()\n sys.stdout = suppress_text\n result = func(*args, **kw)\n sys.stdout = sys.__stdout__\n return result\n\n return wrap", "def testRunSmoke(self):\n stage = self.ConstructStage()\n with self.OutputCapturer():\n stage.Run()", "def on(self):\n self._current_stream = self._stdout", "def test_display__method(self):\n Rectangle.reset_objects()\n s1 = Square(5)\n f = io.StringIO()\n with contextlib.redirect_stdout(f):\n s1.display()\n self.assertEqual(f.getvalue(), \"#####\\n#####\\n#####\\n#####\\n#####\\n\")", "def console(self):\n fricas_console()", "def console_runsource(self, source, filename=None):\n\n\t\tif filename is None:\n\t\t\tfilename = '<remote_console_input>'\n\n\t\t# Inject a global variable to capture expression result.\n\t\t# This implies the fix for http://dev.licorn.org/ticket/582\n\t\tself._console_namespace['_console_result_'] = None\n\n\t\ttry:\n\t\t\t# In case of an expression, capture result.\n\t\t\tcompile(source, filename, 'eval')\n\t\t\tsource = '_console_result_ = ' + source\n\n\t\texcept SyntaxError:\n\t\t\tpass\n\n\t\tmore = self._console_interpreter.runsource(source, filename)\n\t\tresult = self._console_namespace.pop('_console_result_')\n\n\t\tif more is True:\n\t\t\t# nothing to display, just return, for the remote side to continue.\n\t\t\treturn True, ''\n\n\t\toutput = self._console_interpreter.output_buffer\n\t\tself._console_interpreter.output_buffer = ''\n\n\t\tif result is not None:\n\t\t# NOTE: don't pprint, it avoids the ascii-escaped strings to be\n\t\t# interpreted correctly.\n\t\t#\tresult = pprint.pformat(result)\n\t\t\toutput += str(result) + '\\n'\n\n\t\treturn False, output", "def write(self,s):\n if self.passthrough:\n sys.stdout=sys.__stdout__\n print s\n sys.stdout=sys.self\n return \n cb=self._determine_context_buffer(s)\n cb.write(s)\n self.dump()", "def collect_output(self):\n pass", "def collect_output(self):\n pass", "def test_stdout_log(self, logger: Logger) -> None:\n task = OctaveTask()\n task.session_id = \"123\"\n handler = OutputHandler(task)\n 
logger.addHandler(handler)\n\n # Write something to the log\n msg = \"I am a message\"\n logger.info(msg)\n\n assert len(handler.contents) == 1\n assert handler.messages() == msg", "async def main(self) -> None:\n self.register()\n with patch_stdout():\n print_task = asyncio.create_task(self.print_processor())\n try:\n await self.interactive_shell()\n finally:\n print_task.cancel()", "def redirect_stdout(fn):\n @wraps(fn)\n def wrapper(*args, **kwargs):\n original_stdout = sys.stdout\n out = BytesIO()\n try:\n sys.stdout = out\n return fn(out, *args, **kwargs)\n finally:\n sys.stdout = original_stdout\n return wrapper", "def test_send_line(self):\n # Required to get useful test names\n super(TestCisAsciiTableOutput_local, self).test_send_line()", "def process_screening(self):\n\n self.control.processing_mode = \"screening\"\n self.process_loggers()", "def run(self) -> None:\n self._render()\n print(self.sio.getvalue())", "def test_updated_display1(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r1 = Rectangle(2, 3, 2, 2)\n r1.display()\n sys.stdout = sys.__stdout__\n desired = '\\n\\n ##\\n ##\\n ##\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def test_display_method2(self):\n capturedOutput = io.StringIO()\n sys.stdout = capturedOutput\n r2 = Rectangle(2, 2)\n r2.display()\n sys.stdout = sys.__stdout__\n desired = '##\\n##\\n'\n self.assertEqual(capturedOutput.getvalue(), desired)", "def report(self, output_dir):", "def print_outcome(self) -> None:\n pass", "def capture_print():\n\n old_streams = sys.stdout, sys.stderr\n sys.stdout = sys.stderr = io.StringIO()\n filestring = FileString(sys.stdout)\n try:\n yield filestring\n finally:\n sys.stdout, sys.stderr = old_streams\n filestring.read()", "def stdout(self):\n if self.dm.fileExists(self.proc):\n try:\n t = self.dm.pullFile(self.proc)\n except DMError:\n # we currently don't retry properly in the pullFile\n # function in dmSUT, so an error here is not necessarily\n # the end of the world\n return ''\n newLogContent = t[self.stdoutlen:]\n self.stdoutlen = len(t)\n # Match the test filepath from the last TEST-START line found in the new\n # log content. 
These lines are in the form:\n # 1234 INFO TEST-START | /filepath/we/wish/to/capture.html\\n\n testStartFilenames = re.findall(r\"TEST-START \\| ([^\\s]*)\", newLogContent)\n if testStartFilenames:\n self.lastTestSeen = testStartFilenames[-1]\n return newLogContent.strip('\\n').strip()\n else:\n return ''", "def test_print_start(self):\n response = support.create_project(self, 'chicago')\n self.assertFalse(\n response.failed,\n Message('should have created project', response=response)\n )\n\n print_string = string.ascii_lowercase\n\n code = '\\n'.join([\n 'import cauldron as cd',\n 'print(\"{}\")'.format(print_string),\n 'cd.display.text(\"Hello World\")'\n ])\n\n support.add_step(self, contents=code)\n\n response = support.run_command('run -f')\n self.assertFalse(\n response.failed,\n Message('should have run step', response=response)\n )\n\n project = cauldron.project.get_internal_project()\n dom = project.steps[1].dom # type: str\n\n self.assertEqual(\n dom.count(print_string),\n 2,\n 'should have printed ascii lowercase'\n )", "def _render(self) -> None:\n\n # We rebind print to print to our StringIO instance for the scope of\n # this method.\n prn = partial(print, file=self.sio)\n\n def print_binding(print_fn: Callable[[str], None], b: Binding) -> None:\n print_fn('{}{} = {}'.format(\n name_repr(b),\n self.loc_image(b.gen_name),\n self.value_image(b.gen_name)\n ))\n\n if self.state is None:\n prn('Selected frame is not in a property.')\n return\n\n # If we are asked to display only one variable, look for it, print it,\n # and stop there.\n if self.var_name:\n for scope_state in self.state.scopes:\n for b in scope_state.bindings:\n if b.dsl_name == self.var_name:\n print_binding(prn, b)\n return\n prn('No binding called {}'.format(self.var_name))\n return\n\n prn('Running {}'.format(prop_repr(self.state.property)))\n if self.state.property.dsl_sloc:\n prn('from {}'.format(self.state.property.dsl_sloc))\n\n if self.state.in_memoization_lookup:\n prn('About to return a memoized result...')\n\n for scope_state in self.state.scopes:\n\n def print_binding_cb(strn: str) -> None:\n prn(indent(strn, 2))\n\n for b in scope_state.bindings:\n print_binding(print_binding_cb, b)\n\n done_exprs, last_started = scope_state.sorted_expressions()\n\n for e in done_exprs:\n prn(' {}{} -> {}'.format(\n expr_repr(e),\n self.loc_image(e.result_var),\n self.value_image(e.result_var, subsequent_indent=\" \")\n ))\n\n if last_started:\n prn('')\n prn('Currently evaluating {}'.format(\n expr_repr(last_started)\n ))\n if last_started.dsl_sloc:\n prn('from {}'.format(last_started.dsl_sloc))" ]
[ "0.7027907", "0.6089574", "0.59871966", "0.5953501", "0.5823776", "0.58008", "0.5722767", "0.569426", "0.5688539", "0.5679437", "0.56706893", "0.5645537", "0.5632503", "0.5632419", "0.5579383", "0.55504066", "0.54777354", "0.5475465", "0.547519", "0.5462693", "0.5446832", "0.54348505", "0.5432767", "0.5431265", "0.5427429", "0.54177105", "0.5372156", "0.53701395", "0.53401256", "0.53392524", "0.533689", "0.53243184", "0.53236115", "0.53138304", "0.5304812", "0.53011864", "0.53011864", "0.52937835", "0.5285709", "0.52719724", "0.5269661", "0.5267386", "0.525091", "0.52452135", "0.5233242", "0.5211833", "0.5205651", "0.5199804", "0.5181386", "0.5180629", "0.51691777", "0.5166115", "0.51656806", "0.5160664", "0.5159221", "0.5150852", "0.51502824", "0.51494306", "0.5147133", "0.51435035", "0.5133872", "0.51321405", "0.5128952", "0.51240146", "0.5120518", "0.5120015", "0.5119417", "0.511602", "0.5114821", "0.5112912", "0.5105254", "0.5096133", "0.50941443", "0.5093106", "0.5086255", "0.50678575", "0.50672823", "0.5066479", "0.50628966", "0.50624675", "0.50605226", "0.5060508", "0.5059685", "0.5057665", "0.5057613", "0.5057613", "0.5048427", "0.5036514", "0.5023524", "0.50220877", "0.50197303", "0.5016992", "0.50160027", "0.5005868", "0.50058323", "0.5005747", "0.5001278", "0.50010127", "0.4997402", "0.49943125" ]
0.75739104
0
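The pair above exercises ExposedStep.render_to_console, which renders a template message and hands the result to the step report's stdout interceptor. A minimal sketch of that behavior follows; the jinja2 Template call, the dedent, and the final strip() are assumptions inferred from the expected '7 is not happy.' output rather than Cauldron's actual source.

import textwrap

from jinja2 import Template


class ExposedStepSketch:
    """Illustrative stand-in for cauldron's ExposedStep (sketch only)."""

    def __init__(self, step):
        self._step = step

    def render_to_console(self, message: str, **kwargs):
        # Render the Jinja-style template, trim surrounding whitespace,
        # and write the result through the report's stdout interceptor.
        rendered = Template(textwrap.dedent(message)).render(**kwargs).strip()
        self._step.report.stdout_interceptor.write_source(rendered)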
Should raise a ValueError when there is no current step for the write function call to operate upon.
def test_write_to_console_fail(self, _step: PropertyMock): _step.return_value = None step = exposed.ExposedStep() with self.assertRaises(ValueError): step.write_to_console('hello')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _step(self) -> None:", "def step(self):\r\n raise NotImplementedError", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError(\n f'{self.__class__.__name__} must implement a `step` method.'\n )", "def step(self):\n raise NotImplementedError", "def _step(self):\n pass", "def after_step():\n raise NotImplementedError", "def record(self, step):", "def do_step(self) -> None:", "def _report_step(self, learning_rate, step, train_stats=None,\n valid_stats=None):\n if self.report_manager is not None:\n return self.report_manager.report_step(\n learning_rate, step, train_stats=train_stats,\n valid_stats=valid_stats)", "def _handle_write(self):\n pass", "def result(self, step):\n indent_extra = 0\n if self.current_rule:\n indent_extra = self.indent_size\n\n step = self.steps.pop(0)\n indent = make_indentation(2 * self.indent_size + indent_extra)\n if self.show_aligned_keywords:\n # -- RIGHT-ALIGN KEYWORDS (max. keyword width: 6):\n text = u\"%s%6s %s ... \" % (indent, step.keyword, step.name)\n else:\n text = u\"%s%s %s ... \" % (indent, step.keyword, step.name)\n self.stream.write(text)\n\n status_text = step.status.name\n if self.show_timings:\n status_text += \" in %0.3fs\" % step.duration\n\n unicode_errors = 0\n if step.error_message:\n try:\n self.stream.write(u\"%s\\n%s\\n\" % (status_text, step.error_message))\n except UnicodeError as e:\n unicode_errors += 1\n self.stream.write(u\"%s\\n\" % status_text)\n self.stream.write(u\"%s while writing error message: %s\\n\" % \\\n (e.__class__.__name__, e))\n if self.RAISE_OUTPUT_ERRORS:\n raise\n else:\n self.stream.write(u\"%s\\n\" % status_text)\n\n if self.show_multiline:\n if step.text:\n try:\n self.doc_string(step.text)\n except UnicodeError as e:\n unicode_errors += 1\n self.stream.write(u\"%s while writing docstring: %s\\n\" % \\\n (e.__class__.__name__, e))\n if self.RAISE_OUTPUT_ERRORS:\n raise\n if step.table:\n self.table(step.table)", "def step(self):\n raise TaskError(\"Task %s: subclass should override step() method!\" %\n self)", "def step(self, step=None):\n pass", "def test_save_npy_with_invalid_step(temp_dir):\n data = np.array([[1, 2, 3], [4, 5, 6]])\n\n with pytest.raises(ValueError):\n save_npy(temp_dir, data, step={\"invalid\": \"dict\"})", "def report_step_progress(self, step):\n dot_status = self.dot_status[step.status.name]\n if step.status == Status.failed:\n if (step.exception and\n not isinstance(step.exception, AssertionError)):\n # -- ISA-ERROR: Some Exception\n dot_status = self.dot_status[\"error\"]\n step.feature = self.current_feature\n step.scenario = self.current_scenario\n self.failures.append(step)\n self.stream.write(dot_status)\n self.stream.flush()", "def getCurrentStep():", "def simulation_step(self):\n if self.data_valid.get():\n print(\"Output pin %s writing %s\" % (self.name, self.debug_data.get()))", "def test_failed_glue(self):\n sink = self.tool.glue(self.line, self.head, (90, 50))\n self.assertTrue(sink is None)", "def step(self):\n\n pass", "def _step(self, whence):\n pass", "def handle_write(self):\n pass", "def test_write_to_console(self, _step: PropertyMock):\n trials = [2, True, None, 'This is a test', b'hello']\n\n for message in trials:\n _step_mock = MagicMock()\n write_source = MagicMock()\n _step_mock.report.stdout_interceptor.write_source = write_source\n _step.return_value = _step_mock\n step = exposed.ExposedStep()\n 
step.write_to_console(message)\n\n args, kwargs = write_source.call_args\n self.assertEqual('{}'.format(message), args[0])", "def on_step_end(self, step, logs={}):\n self.total_steps += 1\n if self.total_steps % self.interval != 0:\n # Nothing\n return\n\n filepath = self.filepath.format(step=self.total_steps, **logs)\n if self.verbose > 0:\n print('\\nStep {}: saving kmodel to {}'.format(self.total_steps,\n filepath))\n self.kmodel.save(filepath)", "def test_write_skips_invalid_rows(self):\n writer = BaseTSVWriter([\n ('Prop1', Mock(side_effect=InvalidTsvRowException)),\n ])\n\n row = NonCallableMock()\n\n valid, invalid = writer.write(self.tsv_file, [row])\n\n assert valid == []\n assert invalid == [row]\n assert self.tsv_value == \"Prop1\\r\\n\"", "def report_step_progress(self, step):\n pass", "def test_step_out_of_bounds_indices(self):\n _, backend = _collect_episode_data(num_episodes=6)\n data_reader = in_memory_backend.InMemoryBackendReader(backend)\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n len(data_reader.steps))\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n -len(data_reader.steps) - 1)", "def create_step(self, step):\n raise NotImplementedError", "def perform_step(self) -> None:\n pass", "def write_line(self, line):\n # TODO(iannucci): have step_runner log the step metadata as a protobuf\n # and/or put it in the Step proto message.\n return self.logging.write_line(line)", "def step(self) -> bool:\n raise NotImplementedError()", "def test_save_json_with_invalid_step(temp_dir):\n data = json.dumps({\"k\": \"v\", \"list\": [1, 2, 3]})\n\n with pytest.raises(ValueError):\n save_json(temp_dir, data, step={\"invalid\": \"dict\"})", "def step(self, **kwargs):\n pass", "def finishWriting(self, x=None):\n\t\tself.finishedWriting = True", "def _RaiseIfNotWritable(self):\n if not self._storage_file:\n raise IOError('Unable to write to closed storage writer.')", "def finish_writing(self):\n if self.read_option('check_consistency'):\n self._check_write_consistency()", "def step(self, sess, step):\n\t\tif self.is_training:\n\t\t\tloss, optim, summaries = sess.run(\n\t\t\t\t\t[self.loss, self.optim, self.summary_op])\n\t\t\tself.writer.add_summary(summaries, global_step=step)\n\t\telse:\n\t\t\tse = sess.run([self.se])[0]\n\n\t\t\treturn se", "def test_step_out_of_bounds_indices(self):\n _, data_directory = self._collect_episode_data(\n num_episodes=6, max_episodes_per_file=3)\n with riegeli_backend_reader.RiegeliBackendReader(\n data_directory) as data_reader:\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n len(data_reader.steps))\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n -len(data_reader.steps) - 1)", "def check_step(step):\n assert isinstance(step, list), \"Step must be a list\"\n assert (len(step) == 3 or len(step) == 4), \\\n \"Step must be a list of length 3 or 4 (to include temporary values)\"\n assert isinstance(step[0], type), (\n \"The first element of the step \"\n \"must be a class (e.g. 
measurement or a calibration routine)\")\n assert isinstance(step[2],\n dict), (\"The second element of the step \"\n \"must be a dictionary containing settings\")", "def _save(self, step, model):\n\n raise NotImplementedError()", "def _maybe_save(self, step):\n if self.model_saver is not None:\n self.model_saver.maybe_save(step)", "def writePosFilesStep(self): \n \n writeSetOfCoordinates(self._getExtraPath(), self.inputCoordinatesTiltedPairs.get().getUntilted())\n \n writeSetOfCoordinates(self._getExtraPath(), self.inputCoordinatesTiltedPairs.get().getTilted())", "def _save(self, step):\n\n output_path = self.output_path + '/checkpoints/'\n if not os.path.isdir(output_path):\n os.makedirs(output_path)\n self.saver.save(self.session, save_path=output_path,global_step=step)", "def test_step_end(self, output: Optional[_STEP_OUTPUT_TYPE]) -> \\\n Optional[_STEP_OUTPUT_TYPE]:\n output = to_cpu(output)\n\n return super().test_step_end(output)", "def __verify_steps(self):\n if self.major[2] not in self.data[self.root]:\n self.data[self.root][self.major[2]] = {\"step\": []}\n elif not isinstance(self.data[self.root][self.major[2]][\"step\"], list):\n self.data[self.root][self.major[2]][\"step\"] = [self.data[self.root][self.major[2]][\"step\"]]", "def write():\n pass", "def write(self):\n\t\traise NotImplementedError('%s: No write function implemented!' % self.name)", "def save_step(self, step: 'TensorflowV1ModelStep', context: 'ExecutionContext') -> 'BaseStep':\n with step.graph.as_default():\n saver = tf.train.Saver()\n saver.save(step.session, self._get_saved_model_path(context, step))\n step.strip()\n\n return step", "def step(self):\n return self._step", "def handle_robot_step_changed(self, step):\n\n #Save the last step if some lost\n last_known_step = self.step\n super(WeldTask, self).handle_robot_step_changed(step)\n\n if step < 0 or step >= len(self.welding_parameters):\n # invalid step\n return\n\n if self.job is None:\n # no jobs\n return\n\n if self.welding_parameters[step] == WeldingState():\n # default state, skip\n return\n\n if last_known_step > step:\n # moving to the other direction\n return\n\n if self.welding_parameters[last_known_step] != self.welding_parameters[step]:\n # if there is a difference, send the new params\n RosProxy().call_service(\n '/welding_driver/set_params',\n SetWeldingParameters,\n self.welding_parameters[step])", "def writer(data):\n if len(data) == 0:\n return\n try:\n # Call user write function:\n if write_function != None:\n write_function(data)\n except Exception as e:\n # Propagate error if the user function caused an error:\n presented_output_store[\"user_write_error\"] = str(e)\n try:\n process.kill()\n except Exception:\n pass\n presented_output_store[\"data\"] += data", "def value(self, step):\n raise NotImplementedError", "def write(self):\n raise NotImplementedError", "def test_cannot_write_file(self):\n self.api.write_data('/some-fake/path/to-create-file/', 'some-string')", "def step_write_variables_forward(self):\n \n for write_variable in self.write_variables.values():\n write_variable.step_forward()", "def save_feedback_batch_logs(self, writer, step, name, no_gradient=False,\n init=False):\n if not init:\n if not no_gradient and self.reconstruction_loss is not None:\n writer.add_scalar(\n tag='{}/reconstruction_loss'.format(name),\n scalar_value=self.reconstruction_loss,\n global_step=step)\n else:\n if not no_gradient and self.reconstruction_loss is not None:\n writer.add_scalar(\n tag='{}/reconstruction_loss_init'.format(name),\n 
scalar_value=self.reconstruction_loss,\n global_step=step)", "def test_valid_write_error(self, mock_progress):\n mock_initial_status = DirectoryStatus.ERROR\n mock_expected_status = DirectoryStatus.ERROR\n mock_message = \"message\"\n stub_dir_status = self.StubDirectoryStatus()\n stub_dir_status.run_id = None # Upload has not started\n # mock main call to test\n mock_progress.write_directory_status.side_effect = [None]\n # run function\n upload_helpers._set_and_write_directory_status(stub_dir_status, mock_initial_status, mock_message)\n # verify write\n stub_dir_status.status = mock_expected_status\n mock_progress.write_directory_status.assert_called_with(stub_dir_status)", "def step_forward(self):\n self.read_value = self.write_value\n self.write_value = None", "def getStep():\n # TODO: can there be non-Step logs?", "def step ( self ) :\n return self.__step", "def test_removed_step(raw_frame):\n data = DataSteps(raw_frame)\n\n @data.step\n def inc_col1(frame):\n return frame.assign(Col1=lambda df: df[\"Col1\"] + 1)\n\n @data.step(active=False) # noqa: F811\n def inc_col1(frame):\n return frame.assign(Col1=lambda df: df[\"Col1\"] + 1)\n\n assert len(data.steps) == 0", "def chunk_written(self):\n pass", "def test_write(self):\n writer = BaseTSVWriter([\n ('Prop1', 'prop1'),\n ('Prop2', 'prop2'),\n ])\n\n row = NonCallableMock(\n prop1=145,\n prop2=None,\n )\n\n valid, invalid = writer.write(self.tsv_file, [row])\n\n assert valid == [row]\n assert invalid == []\n assert self.tsv_value == (\n \"Prop1\\tProp2\\r\\n\"\n \"145\\t\\r\\n\" # None should convert to an empty string\n )", "def _handle_write_event(self):\n log.warning(\"Received write event for %r.\" % self)", "async def on_step(self, iteration: int):\n raise NotImplementedError", "def step(self, d=1):\n raise NotImplementedError()", "def _prey_step(self):\n raise NotImplementedError()", "def _validate(self):\n\n if not self.definition_func or not callable(self.definition_func):\n raise RadishError(\"The step '{0}' does not have a step definition\".format(self.sentence))", "def testRaiseIfNotWritable(self):\n storage_writer = writer.StorageWriter()\n\n with self.assertRaises(IOError):\n storage_writer._RaiseIfNotWritable()", "def test_second_step_strict(self):\n with self.assertRaises(Exception):\n self.run_step('S02-errors.py', allow_failure=False)", "def step_impl(context):\n pass", "def step_impl(context):\n pass", "def TestOneStep(self):\n pass", "def test_silent_write_errors():\n\n tracker = pawprint.Tracker(db=None, table=None)\n\n try:\n tracker.write(event=\"This will fail silently.\")\n except Exception:\n pytest.fail(\"Failed to fail silently.\")", "def eval_step(self, *args, **kwargs):\n raise NotImplementedError", "def next_step(self):\n if self.time_point + 1 >= len(self.data):\n print(\"Error: at last time point\")\n else:\n self.time_point = self.time_point + 1\n self.load_frame()", "def save_on_host(\n self, host_outputs: Any, writer: SummaryWriter, step: int\n ) -> None:\n raise NotImplementedError", "def _MaybeWriteSummary(self, sess, global_step, values, outfeeds):\n self._eval_metrics.PackMetricsValues(values)\n eval_metrics = self._eval_metrics.metrics\n\n if self._ShouldWriteSummary(global_step):\n step_rate, example_rate, total_examples = (\n self._step_rate_tracker.ComputeStepRate(\n global_step,\n eval_metrics['num_samples_in_batch'][0] * self._steps_per_loop))\n self._SummarizeValue(global_step, 'global_step/sec', step_rate)\n self._SummarizeValue(global_step, 'examples/sec', example_rate)\n 
self._SummarizeValue(global_step, 'total_samples', total_examples)\n self._SummarizeValue(global_step, 'total_num_params',\n self._total_num_params)\n status_strs = []\n for key, (val, _) in sorted(eval_metrics.items()):\n self._SummarizeValue(global_step, key, val)\n tf.logging.info((global_step, key, val))\n status_strs.append(f'{key}={val}')\n self.SetStatusMessage('Executing train program at step %d %s' %\n (global_step, ','.join(status_strs)))\n\n # TODO(laigd): Not all `ProcessFPropResults` work in Eager.\n if not py_utils.IsEagerMode() or py_utils.RunProcessFPropResultsInEager():\n summaries = self._task.ProcessFPropResults(sess,\n self._GetGlobalStep(sess),\n eval_metrics, outfeeds)\n if not py_utils.IsEagerMode():\n self._WriteSummaries(\n os.path.basename(self._program_dir), global_step, summaries)\n self._summary_writer.flush()", "def write(self, *, global_step: int, new_line: bool = True):\n for w in self.__writers:\n w.write(global_step=global_step,\n queues=self.queues,\n histograms=self.histograms,\n pairs=self.pairs,\n scalars=self.scalars,\n tf_summaries=self.tf_summaries)\n self._write_to_screen(new_line=new_line)\n self._clear_stores()", "def test_invalid_write(self, mock_progress):\n mock_status = \"status\"\n mock_message = \"message\"\n stub_dir_status = self.StubDirectoryStatus()\n # mock main call to test\n mock_progress.side_effect = progress.exceptions.DirectoryError(\"\", \"\")\n # run function\n with self.assertRaises(progress.exceptions.DirectoryError):\n upload_helpers._set_and_write_directory_status(stub_dir_status, mock_status, mock_message)", "def save_logs(self, writer, step, name, no_gradient=False,\n no_fb_param=False):\n forward_weights_norm = torch.norm(self.weights)\n writer.add_scalar(tag='{}/forward_weights_norm'.format(name),\n scalar_value=forward_weights_norm,\n global_step=step)\n if self.weights.grad is not None:\n forward_weights_gradients_norm = torch.norm(self.weights.grad)\n writer.add_scalar(tag='{}/forward_weights_gradients_norm'.format(name),\n scalar_value=forward_weights_gradients_norm,\n global_step=step)\n if self.bias is not None:\n forward_bias_norm = torch.norm(self.bias)\n\n writer.add_scalar(tag='{}/forward_bias_norm'.format(name),\n scalar_value=forward_bias_norm,\n global_step=step)\n if self.bias.grad is not None:\n forward_bias_gradients_norm = torch.norm(self.bias.grad)\n writer.add_scalar(tag='{}/forward_bias_gradients_norm'.format(name),\n scalar_value=forward_bias_gradients_norm,\n global_step=step)\n if not no_fb_param:\n feedback_weights_norm = torch.norm(self.feedbackweights)\n writer.add_scalar(tag='{}/feedback_weights_norm'.format(name),\n scalar_value=feedback_weights_norm,\n global_step=step)\n if self.feedbackbias is not None:\n feedback_bias_norm = torch.norm(self.feedbackbias)\n writer.add_scalar(tag='{}/feedback_bias_norm'.format(name),\n scalar_value=feedback_bias_norm,\n global_step=step)\n\n if not no_gradient and self.feedbackweights.grad is not None:\n feedback_weights_gradients_norm = torch.norm(\n self.feedbackweights.grad)\n writer.add_scalar(\n tag='{}/feedback_weights_gradients_norm'.format(name),\n scalar_value=feedback_weights_gradients_norm,\n global_step=step)\n if self.feedbackbias is not None:\n feedback_bias_gradients_norm = torch.norm(\n self.feedbackbias.grad)\n writer.add_scalar(\n tag='{}/feedback_bias_gradients_norm'.format(name),\n scalar_value=feedback_bias_gradients_norm,\n global_step=step)", "def _check_write_consistency(self):\n self.logger.warning('Not checking write 
consistency')", "def get_steps_num():\n return 0", "def _step_callback_gen(self):\n\n with open(self.output_filename, 'wb') as f: # Note: need to use wb since windows\n w = csv.writer(f, lineterminator=os.linesep) # Also lineterminator=os.linesep for cross platform compatibility\n\n while True:\n sensor, step_samples = (yield) # Wait for step_callback to send us the a sample\n\n if not self._headers_written: \n w.writerow(sensor.get_csv_headers())\n self._headers_written = True\n \n if self.sim.data_logging_enabled(self, sensor):\n for sample in step_samples:\n w.writerow(list(sample)) # Note: each sample is assumed to be a namedtuple of some sort", "def peek_write(self):\n ...", "def test_writer_represents_missing_data_correctly(self, tmpdir, standard_gwas_parser_basic):\n reader = readers.IterableReader([\"1\\t100\\tA\\tC\\tNone\", \"2\\t200\\tA\\tC\\t.\"],\n parser=standard_gwas_parser_basic)\n expected_fn = tmpdir / 'test.txt'\n out_fn = reader.write(expected_fn, columns=['neg_log_pvalue'], make_tabix=False)\n\n assert expected_fn == out_fn\n assert os.path.isfile(out_fn), \"Output filename exists\"\n with open(out_fn, 'r') as f:\n assert f.readlines() == [\"#neg_log_pvalue\\n\", \".\\n\", \".\\n\"]", "def __call__(self, new_val, previous_val, step):\n\t\treturn", "def test_missing_arg(self):\n with self.assertRaises(TypeError):\n self.r1.save_to_file()", "def step(self, state):", "def train_loop_post(self, current_step):\r\n pass", "def validation_step_end(self, output: Optional[_STEP_OUTPUT_TYPE]) -> \\\n Optional[_STEP_OUTPUT_TYPE]:\n output = to_cpu(output)\n\n return super().validation_step_end(output)", "def Step(self, *args):\n return _gmat_py.Propagator_Step(self, *args)", "def write(self, base_name, output_directory, integrator):\n msg = \"ReaderWriterBase::write called!\"\n raise NotImplementedError(msg)", "def execute_write(function):\n raise NotImplementedError(\"execute_write() has not been implemented\")", "def write(self):\n pass", "def write(self):\n pass", "def step(self, move):", "def getSteps():" ]
[ "0.60865235", "0.6002841", "0.59607214", "0.59607214", "0.59607214", "0.59400934", "0.5931165", "0.59239656", "0.59183896", "0.5840517", "0.57957006", "0.5792233", "0.57350117", "0.57000464", "0.5628943", "0.5625073", "0.5619831", "0.56141627", "0.5597384", "0.5548896", "0.5536715", "0.55229527", "0.5506704", "0.55008084", "0.54638463", "0.5455872", "0.5454955", "0.5438352", "0.54267335", "0.5387193", "0.537746", "0.53480506", "0.5336787", "0.53302956", "0.53277093", "0.5325222", "0.531655", "0.5304709", "0.52811164", "0.52561724", "0.5244142", "0.52369916", "0.5209383", "0.5207783", "0.52017695", "0.51958036", "0.5181842", "0.51732504", "0.5164862", "0.51645994", "0.5161189", "0.51575357", "0.5149549", "0.5140184", "0.5124932", "0.511402", "0.5111673", "0.51106524", "0.5108114", "0.51076365", "0.51056606", "0.5087793", "0.5086579", "0.5081551", "0.5080867", "0.5077618", "0.5057521", "0.5048064", "0.50437737", "0.5042299", "0.50421566", "0.50397784", "0.50392187", "0.50392187", "0.50363886", "0.50299656", "0.5029572", "0.5028877", "0.50274384", "0.50266916", "0.50231576", "0.50219786", "0.50182027", "0.5013869", "0.4999699", "0.49925935", "0.49908248", "0.49836394", "0.4976727", "0.49716932", "0.496978", "0.49652493", "0.49613047", "0.49491805", "0.49491113", "0.49414694", "0.49341893", "0.49341893", "0.493358", "0.4926721" ]
0.6328565
0
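A hedged sketch of the guard the pair above verifies: when the internal _step reference resolves to None there is no report to write to, so write_to_console raises ValueError before touching the interceptor. The class shape and error message below are illustrative assumptions, not Cauldron's exact implementation.

class ExposedStepSketch:
    """Illustrative stand-in; the real _step is resolved lazily."""

    def __init__(self, step=None):
        self._step = step

    def write_to_console(self, message):
        # Fail loudly when no step is active rather than dropping output.
        if self._step is None:
            raise ValueError('No current step exists on which to write.')
        self._step.report.stdout_interceptor.write_source('{}'.format(message))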
Should render stop display without error.
def test_render_stop_display(self, get_formatted_stack_frame: MagicMock): get_formatted_stack_frame.return_value = [ {'filename': 'foo'}, {'filename': 'bar'}, {'filename': os.path.realpath(exposed.__file__)} ] step = MagicMock() exposed.render_stop_display(step, 'FAKE') self.assertEqual(1, step.report.append_body.call_count)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unrendered(self) -> str:", "def _stop(self):\n self.display_end_message()", "def _render(self) -> None:\n pass", "def err_message(self, message):\n self.errors.append(1)\n message = \"<b>\" + message + \"</b>\"\n self.timer_id = GLib.timeout_add_seconds(5, self.error_false)\n # Show if is was hidden\n if self.hidden:\n self.toggle()\n self.was_hidden = True\n self.left_label.set_markup(message)", "def __showError(self, out):\n self.errorGroup.show()\n self.errors.insertPlainText(Utilities.filterAnsiSequences(out))\n self.errors.ensureCursorVisible()\n \n QCoreApplication.processEvents()", "def death_screen():\n return False", "def getImmediateRendering():\n\n\treturn False", "def display(self):\n print self.careErrors\n\n\n return self.exec_()", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def render(self):\n pass", "def should_show():", "def test_render_stop_display_error(\n self,\n get_formatted_stack_frame: MagicMock,\n render_template: MagicMock\n ):\n get_formatted_stack_frame.return_value = None\n step = MagicMock()\n exposed.render_stop_display(step, 'FAKE')\n self.assertEqual({}, render_template.call_args[1]['frame'])", "def show_error(self):\n print('LSE Error : {}'.format(self._error))", "def view(self):\n\t\tself.done(1)", "def render(self, mode='human', close=False):\n pass", "def aborting(self):\n \n pass", "def renderError(self, error_code):\n\n self.error(error_code)\n self.response.write(\"Oops! Something went wrong.\")", "def renderError(self, error_code):\n\n self.error(error_code)\n self.response.write(\"Oops! Something went wrong.\")", "def render(self):\n _ = self.request.getText\n form = self.request.form\n \n if form.has_key('cancel'):\n # User canceled\n return self.page.send_page(self.request)\n\n try:\n if not self.allowed():\n raise ActionError(_('You are not allowed to edit this page.'))\n elif not self.page.exists():\n raise ActionError(_('This page is already deleted or was never created!'))\n \n self.package()\n except ActionError, e:\n return self.page.send_page(self.request, msg=e.args[0])", "def render(self, mode='human'):\n pass # no use in this situation", "def stopProducing(self):\n pass", "def test_visualisations_cancel_visualisation_render_data_refresh_job(self):\n pass", "def start_render() -> None:\n get_window().clear()", "def render(self, mode='human', close=False):\n pass", "def render(self, mode='human', close=False):\n return None", "def dspyRender(self):\n pass", "def stop_animation_on_error(failure, controller):\n controller.stop_loading_animation()", "def render(self):\r\n super().render()", "def stop(self):", "def stop(self):", "def render(self, mode='human'):", "def stop(self) -> None:", "def stop(self) -> None:", "def stop(self):\n return", "def normalExitWithoutErrors(self):\n return self.normal and self.errors.toPlainText() == \"\"", "def render(self, screen):\n pass", "def render(self, screen):\n pass", "def stop(self):\n self.halt = True", "def _stop(self):\n return True", "def stop(self):\n if self.isCompiled():\n glUseProgram(0)\n else:\n raise Exception(\"el programa no ha sido compilado aun\")", "def bad_request(error):\r\n\treturn render_template('error_template.html' , title = \"Aaaah ...\", \r\n\t\t\t\t\t\t\t\t\t\t\t\t\tmessage = \"나는 이해하지 못한다.\",\r\n \t\t\t\t\t\t\t\t\t\t\t\tsubline = \"Yeah, the server couldn't understand what you asked for, probably because you didn't give a choice of download.\", \r\n 
\t\t\t\t\t\t\t\t\t\t\t\timage_location = url_for('static', filename = 'images/simpson-gangam.jpg')), 400", "def render() -> None:\n first = True\n while True:\n action = None # type: Optional[int]\n if not first:\n action = signal_queue.get()\n\n if first or action == SHOULD_RERENDER:\n errors = rasaeco.render.once(scenarios_dir=scenarios_dir)\n for error in errors:\n print(error, file=stderr)\n\n if not errors:\n print(\n f\"{prefix}: The scenarios have been re-rendered.\", file=stdout\n )\n first = False\n\n elif action == SHOULD_STOP:\n return\n\n else:\n raise AssertionError(f\"Unexpected action: {action}\")", "def render_mainpageerror(errormsg) -> 'html':\n return render_template('mainpage.html',\n title='Word association',\n analyse=errormsg)", "def stop(self):\r\n pass", "def end_rendering(self, output):\n if self.wrapper_to_generate:\n output = self.div(output, id=self.id, class_='nagare-generated nagare-async-view')\n\n return output", "def renderHTTP(self, ctx):\n return 'Keep trying. You are almost there.'", "def stop(self):\n\t\tpass", "def display_error(self, message):\n self.ui_widget.display_error(message=message)", "def pre_stop(self):", "def __call__(self):\n self.show()", "def lazy_display_error(self, filename):\n position = self.cursor()\n error = self.get_error_at(position)\n if error:\n report = error.get_truncated_message(position, self.width() - 1)\n self.raw_message(report)", "def render(self):\n logging.info(self._get_status())\n pass", "def stop (self):\n pass", "def stop (self):\n pass", "def error():\n return render_template(\"error.html\", **locals())", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def __showErrorMessage(self):\r\n # show messages if not in the test mode and there is an errorMessage\r\n if self.testingMode == False and self.errorMessage != None: \r\n self.setStyleSheet(\"QMessageBox{background: self.primaryColor; }\"); # change the color theme in case of an error\r\n self.msgBox.warning(self, \"Error\", self.errorMessage, QMessageBox.Ok, QMessageBox.Ok)\r\n self.setStyleSheet(\"background-color:\" + MAIN_WINDOW_SECONDARY_COLOR + \";\"); # return the color theme to its original\r\n self.errorMessage = None", "def error_out(self) -> bool:\n return self._action == 'error'", "def showerrors():\n errorMessages = middleware.ixn.showErrorMessage(silentMode=True)\n if errorMessages:\n print(errorMessages)\n print()", "def result_display(result):\n raise TryNext", "def stop(self):\r\n # productive\r\n profprint()\r\n self.logic.changeCursor(0)\r\n self.removeObservers()\r\n self.fiducialObturatorButton.checked = 0\r\n self.fiducialButton.checked = 0\r\n self.validationNeedleButton.checked = 0", "def error():\n return render_template(\"404.html\")", "def indicate_error(self):\n pass", "def __exit__(self, type, value, traceback) -> bool:\n self.logs = list(HammerVLSILogging.output_buffer)\n HammerVLSILogging.enable_buffering = self.old_enable_buffering\n HammerVLSILogging.output_buffer.clear()\n HammerVLSILogging.enable_colour = self.old_enable_colour\n # Return True (normal execution) if no exception occurred.\n return True if type is None else False", "def displaysuspicions(self):\n raise NotImplementedError()", "def halt(self, _):\n self.execution_manager.is_submission_enabled = False\n 
self.menu_structure['terminate'] = ('main', [('Continue submitting jobs', self.enable_submission)])\n self.__back_to_main()", "def stop() -> None:", "def display_message():", "def stop(self) -> None:\n ...", "def showMessage(self):", "def fail():\n sys.stdout.write('%s[ fail ]%s\\n' % (colors.RED, colors.RESET))", "def render(self, mode='human'):\n\n pass", "def clearerror(self):\n yield from self.command('clearerror')\n return True", "def _stop_display(self):\n if self._proc and not self._proc.returncode:\n pid = self._proc.pid\n os.kill(pid, signal.SIGKILL)", "def _stop(self):", "def test(done = None):\n print(inspect.stack()[1][3])\n if(done):\n flash('New Entry Done!')\n return render_template('webpage/index1.html')", "def stop():", "def stop():", "def stop():", "def stop():", "def render(self):\n raise NotImplementedError()", "def finish_render():\n get_window().static_display = True\n get_window().flip_count = 0\n get_window().flip()", "def show_error(self, error):\n if (error == \"\"):\n self.ui.errorLabel.setText(\"\")\n else:\n self.ui.errorLabel.setText(\"<span style=\\\"font-weight:600; color:#ff0000;\\\">{0}</span>\".format(error))", "def interrupt(self):\n return True", "def get(self):\n self.render_front()", "def stop(self):\n if self.view is not None:\n self.view.stop()", "def show_crash(self):\n print(\"Crash! Oh noes!\")" ]
[ "0.6397472", "0.63438386", "0.6252806", "0.6063289", "0.6059273", "0.6047525", "0.6009877", "0.5999331", "0.5892407", "0.5892407", "0.5892407", "0.5892407", "0.5892407", "0.5892407", "0.5881014", "0.5866852", "0.5825784", "0.5824481", "0.58154", "0.5803385", "0.57909334", "0.57909334", "0.5789175", "0.57837844", "0.57657266", "0.57554966", "0.57509196", "0.575001", "0.5699574", "0.5695399", "0.5693672", "0.5688957", "0.56823015", "0.56823015", "0.56674033", "0.56543195", "0.56543195", "0.5653285", "0.5651552", "0.563393", "0.563393", "0.5629282", "0.5627017", "0.5626169", "0.56242394", "0.562286", "0.56170946", "0.5614755", "0.5606513", "0.56011105", "0.5593455", "0.55822533", "0.55727506", "0.55696815", "0.5560116", "0.5554454", "0.55386794", "0.55386794", "0.5536637", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55283445", "0.55225825", "0.5508094", "0.55078083", "0.5507504", "0.5492245", "0.5487999", "0.54808205", "0.54760146", "0.54746866", "0.5471612", "0.5469588", "0.5463404", "0.54511744", "0.544637", "0.5445978", "0.5443108", "0.5442537", "0.5440934", "0.5439923", "0.5436688", "0.54352635", "0.54352635", "0.54352635", "0.54352635", "0.54318786", "0.5426651", "0.5421769", "0.5416531", "0.5415284", "0.54122996", "0.54111403" ]
0.0
-1
Should render an empty stack frame when the stack data is invalid.
def test_render_stop_display_error(
        self,
        get_formatted_stack_frame: MagicMock,
        render_template: MagicMock
):
    get_formatted_stack_frame.return_value = None
    step = MagicMock()
    exposed.render_stop_display(step, 'FAKE')
    self.assertEqual({}, render_template.call_args[1]['frame'])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_empty_stack() -> None:\n with raises(GrammarParseError):\n grammar_parser.parse(\"ab}\", lexer_mode=\"VALUE_MODE\")", "def empty(self):\r\n return len(self.stack) == 0", "def show_stack(self) -> None:\n print(\"Show stack: \")\n ok = 1\n for i in reversed(self.items):\n print(i)\n ok = 0\n if ok:\n print(\"The stack is empty!\")\n print(\"\\n\")", "def empty(self) -> bool:\n if len(self.stack)==0:\n return True\n else:\n return False", "def is_empty(self):\n return len(self.the_stack) == 0", "def empty(self) -> bool:\n if len(self.input_stack)==0 and len(self.output_stack)==0:\n return True\n else:\n return False", "def test_empty_stack_has_no_value(empty_stack):\n assert empty_stack.top is None", "def is_empty(self):\n return len(self.stack) == 0", "def empty(self):\n return len(self.stack) == 0", "def empty(self):\n return len(self.stack) == 0", "def empty(self) -> bool:\n if len(self.stackOut) == 0 and len(self.stackIn) == 0:\n return True", "def empty(self):\r\n return self.stack == []", "def empty(self) -> bool:\n return len(self.input_stack) == 0 and len(self.output_stack) == 0", "def empty(self):\n return len(self.stack1) == 0 and len(self.stack2) == 0", "def empty(self):\n return len(self.stack1) == 0 and len(self.stack2) == 0", "def empty(self):\n return len(self.stacks[self.activeStackIndex]) == 0", "def empty(self):\n return self.stack == []", "def undo_stack_not_empty(self):\n return self.undo_stack.stack_not_empty()", "def empty(self) -> bool:\n return not self._input_stack and not self._output_stack", "def empty(self):\n return self.input_stack == [] and self.output_stack == []", "def empty(self) -> bool:\n return len(self.stack) == 0", "def empty(self) -> bool:\n return not self.stack", "def empty(self) -> bool:\n return not self.stack", "def isEmpty(self):\n return len(self.stack) == 0", "def is_empty(self):\n\n return not self._stack", "def isEmpty(self):\n return not self.stack", "def make_empty_stack():\n return Stack(0, None)", "def empty(self) -> bool:\n return 1 if not self.stack else 0", "def test_peek_empty():\n test_stack = stack.Stack()\n\n with pytest.raises(stack.StackEmptyError):\n test_stack.peek()", "def empty_stack(stack):\n if stack.top is None:\n return True\n else:\n return False", "def is_empty(self): #checks to see if stack is empty or not\n if self.num_items == 0:\n return True\n else:\n return False", "def is_empty(self):\n return self._stack_items == []", "def test_render_stop_display(self, get_formatted_stack_frame: MagicMock):\n get_formatted_stack_frame.return_value = [\n {'filename': 'foo'},\n {'filename': 'bar'},\n {'filename': os.path.realpath(exposed.__file__)}\n ]\n step = MagicMock()\n exposed.render_stop_display(step, 'FAKE')\n self.assertEqual(1, step.report.append_body.call_count)", "def empty(self) -> bool:\n return self._stack.empty()", "def invalid(self):\n pass", "def frames():\n raise RuntimeError('Must be implemented by subclasses.')", "def empty(self) -> bool:\n return (not self.inStack) and (not self.outStack)", "def test_empty_template(self):\n with TemplateRenderThread('yaml_file_empty.t', 'yaml_file_empty.tmp.out') as renderer:\n def check_render_got_exception():\n return renderer.raised_exception\n\n self.assertTrue(legion.utils.ensure_function_succeed(check_render_got_exception, 5, 3))\n\n self.assertIsNotNone(renderer.raised_exception)\n self.assertIsInstance(renderer.raised_exception, Exception)\n self.assertEqual(renderer.raised_exception.args[0], 'Template doesnt use any plugin')", "def 
test_size_on_empty_stack(empty_stack):\n assert empty_stack._size == 0\n empty_stack.push(1)\n assert empty_stack._size == 1", "def test_stack_none(new_dque):\n assert new_dque.new_dll.head is None\n assert new_dque.new_dll.tail is None\n assert new_dque.size() == 0", "def is_empty(self, trace) -> bool:\n return len(trace) == 0", "def _debug_stack(self):\n debug(\"current stack: %s\" % self.calc.stack)", "def is_stack(self) -> bool:\n return self.layers > 1", "def test_no_data():\n response = test_app.post(\"/bkt_service/unwind\", expect_errors=True)\n assert response.status == '400 Bad Request'\n assert \"No data\" in response.text", "def validateBedGraph(df):\n try:\n msg = ''\n if df.empty:\n return [False, 'Not a valid dataframe'] \n if df.isnull().values.any() == True: \n msg = 'Missing values' + '\\n' + str(df.isnull().sum())\n return [False, msg]\n return [True, msg]\n except (TypeError, AttributeError, KeyError):\n return [False, 'Not a valid dataframe']", "def checkFramesHaveData(self, event=None):\n for idx, frame in enumerate(self.frameList):\n if frame.hasRequiredData():\n self.frameBtnList[idx].config(state=\"normal\")\n else:\n frame.clearFrame()\n self.frameBtnList[idx].config(state=\"disabled\")\n if frame != self.contentFrame.currFrame:\n frame.built = False", "def test_empty_val_on_insert(empty_stack):\n with pytest.raises(TypeError) as e:\n empty_stack.push(None)\n assert str(e.value) == 'Cannot push a value of none'", "def test_view_with_bad_blank_data(self):\n site = Site.test_objects.create_site('site name')\n response = self.client.post(\n reverse('streamwebs:camera_point_add',\n kwargs={'site_slug': site.site_slug}\n ), {\n 'camera_point-TOTAL_FORMS': '3', # 3 for now\n 'camera_point-INITIAL_FORMS': '0', # none are prefilled\n 'camera_point-MAX_NUM_FORMS': '3',\n 'camera_point-MIN_NUM_FORMS': '3',\n\n 'form-TOTAL_FORMS': '3', # 3 for now\n 'form-INITIAL_FORMS': '0', # none are prefilled\n 'form-MAX_NUM_FORMS': '3',\n 'form-MIN_NUM_FORMS': '3',\n }\n )\n self.assertFormError(response, 'camera_form', 'cp_date',\n 'This field is required.')\n self.assertTemplateUsed(\n response,\n 'streamwebs/datasheets/camera_point_add.html'\n )", "def test_empty(self):\n self.assertRaisesInternalError(())", "def empty(self):\r\n if len(self.pushStack) == len(self.popStack) == 0:\r\n return True\r\n return False", "def test_func_stack(self):\n cmd = \"deref $_stack()\"\n self.assertFailIfInactiveSession(gdb_run_cmd(cmd))\n res = gdb_start_silent_cmd(cmd)\n self.assertNoException(res)\n if is_64b():\n self.assertRegex(res, r\"\\+0x0*20: *0x0000000000000000\\n\")\n else:\n self.assertRegex(res, r\"\\+0x0.*20: *0x00000000\\n\")", "def test_blank_problem(self):\r\n xml_str = \"<problem> </problem>\"\r\n\r\n # Create the problem\r\n problem = new_loncapa_problem(xml_str)\r\n\r\n # Render the HTML\r\n rendered_html = etree.XML(problem.get_html())\r\n # expect that we made it here without blowing up\r", "def visualizar(self):\n print(self.stack)", "def test_built_in_renderer_works_correctly_with_none(self):\n data = None\n rendered = self.renderer.render(\n data=data, media_type=\"application/json\",\n )\n\n self.assertEqual(b\"\", rendered)", "def test_empty_dataframe_during_daily_data_generation(self):\n # if we have an empty data frame, we should get one back\n result = self.post_processor._generate_daily_data(DataFrame())\n self.assertTrue(result.empty)", "def empty(self) -> bool:\n return len(self.sk2.stack) <= 0 if len(self.sk1.stack) == 0 else False", "def _expect_empty(self):\n\n 
item = self._lexer.get_token()\n if item:\n line_no, token = item\n raise ParseError(u\"Unexpected token '{0}' on line {1}\"\n .format(common.from_utf8(token.strip()), line_no))", "def refresh_stack(self):\n self.stack, _ = self.compute_stack(self.fullstack)\n # find the current frame in the new stack\n for i, (frame, _) in enumerate(self.stack):\n if frame is self.curframe:\n self.curindex = i\n break\n else:\n self.curindex = len(self.stack)-1\n self.curframe = self.stack[-1][0]\n self.print_current_stack_entry()", "def renderFrame(self):\n assert self.notify.debugStateCall(self, 'loginFSM', 'gameFSM')\n\n # Make sure any textures are preloaded before we render.\n gsg = base.win.getGsg()\n if gsg:\n render2d.prepareScene(gsg)\n\n base.graphicsEngine.renderFrame()", "def test_context_data_no_messages_for_invalid_form(self):\n response = self.client.get(self.get_url(), {'description': '', 'name': ''})\n messages = list(response.wsgi_request._messages)\n self.assertEqual(len(messages), 0)", "def test_context_data_no_messages_for_invalid_form(self):\n response = self.client.get(self.get_url(self.study.pk), {'description': ''})\n messages = list(response.wsgi_request._messages)\n self.assertEqual(len(messages), 0)", "def test_context_data_no_messages_for_invalid_form(self):\n response = self.client.get(self.get_url(self.study.pk), {'description': ''})\n messages = list(response.wsgi_request._messages)\n self.assertEqual(len(messages), 0)", "def do_display_stack_ascii(self, address):\n if self.reader.exception is None:\n print(\"Minidump has no exception info\")\n return\n if len(address) == 0:\n address = None\n else:\n address = self.ParseAddressExpr(address)\n self.padawan.PrintStackTraceMessage(address)", "def hasCurrentFrame(self):\n if self.currentFrame == []:\n return False\n return True", "def is_empty(self):\n raise NotImplimentedError", "def test_no_data(self):\n for demo in State.GROUP_NAMES:\n response = self.client.get(reverse('education:demographic_detail',args=(demo,)))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.context.get(\"json_rate_data\"), None)\n self.assertNotEqual(response.context.get(\"message\"), None)\n self.assertContains(response, \"Home\")\n self.assertContains(response, \"No Data Available\")\n self.assertNotContains(response, '<svg id=\"popsvg\"')", "def test_context_data_no_messages_for_invalid_form(self):\n response = self.client.get(self.get_url(), {'description': ''})\n messages = list(response.wsgi_request._messages)\n self.assertEqual(len(messages), 0)", "def test_context_data_no_messages_for_invalid_form(self):\n response = self.client.get(self.get_url(), {'description': ''})\n messages = list(response.wsgi_request._messages)\n self.assertEqual(len(messages), 0)", "def test_empty_game(self):\n sgf = \"()\"\n try:\n coll = parseSgf(sgf)\n except SgfParseError, e:\n if str(e) == \"Empty game.\":\n return\n self.assertFalse(1)", "def draw_empty( self ):\n prefs = self.prefs\n fig = Figure()\n canvas = FigureCanvasAgg( fig )\n dpi = prefs['width'] /prefs['width_inches']\n height_inches = prefs['height'] / float(dpi)\n fig.set_size_inches( prefs['width_inches'], height_inches )\n fig.set_dpi( dpi )\n fig.set_facecolor('white')\n fig.text( .5, .5, \"No data returned by DB query.\", horizontalalignment='center' )\n self.ax = None\n self.fig = fig\n self.canvas = canvas", "def form_invalid(self, *args, **kwargs):\n\t\tcontext = self.get_context_data()\n\t\tcontext.update(kwargs)\n\t\treturn self.render_to_response(context)", "def 
form_invalid(self, *args, **kwargs):\n\t\tcontext = self.get_context_data()\n\t\tcontext.update(kwargs)\n\t\treturn self.render_to_response(context)", "def test_Input_Invalid_Data(self):\n height = StringVar(self.root, 0)\n width = StringVar(self.root, -45)\n mines = StringVar(self.root, 3)\n with self.assertRaises(Exception) as context:\n self.menu.createGameWindow('Custom', height, width, mines)\n self.assertTrue('Invalid data' in str(context.exception))", "def test_undefined_as_null_indicator_no_prepared_statements(self):\n self.custom_null_indicator_template('undefined', copy_from_options={'PREPAREDSTATEMENTS': 'False'})", "def print_elments(stack):\n\n if len(stack) != 0:\n for value in stack:\n print(value,end=' ')\n else:\n print(\"is Empty\")", "def test_BBBP_erroneous_data_removed_structural(self):\n cp = Plotter.from_smiles(self.data_BBBP_erroneous_smiles[\"smiles\"], target=self.data_BBBP_erroneous_smiles[\"target\"], target_type=\"C\", sim_type=\"structural\")\n self.assertEqual(len(cp._Plotter__df_descriptors.index), len(self.data_BBBP_erroneous_smiles.index) - len(self.list_BBBP_erroneous_smiles))", "def raise_on_invalid(self) -> None:\n if not self.is_valid:\n raise InvalidDataFrameError(self.report)", "def test_blank_content_object_debug(self):\n tmpl = Template(\"\"\"\n output:\n {% load editregion %}\n {% editregion \"test\" obj %}fallback{% endeditregion %}\n \"\"\")\n with self.assertRaisesRegexp(ValueError, \"content_object was probably \"\n \"'', check the context \"\n \"provided\"):\n tmpl.render(Context()).strip()", "def unrendered(self) -> str:", "def i_am_empty():\n pass", "def test_no_data(self):\n response = self.client.get(reverse('education:index'))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.context.get(\"json_data\"), None)\n self.assertContains(response, \"High School Graduation\")\n self.assertContains(response, \"How Rates Were Calculated\")\n self.assertContains(response, \"Home\")\n self.assertNotContains(response, '<svg id=\"graduation_rate_map\"')", "def test_missing_data(self):\n\n response = self.client.post(\n self.reg_url,\n {},\n format=\"json\")\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertIn(b\"required\", response.content)", "def test_invalid_data_is_empty(self, app, data_queues):\n res = self._call(app, {\"invalid\": 0}, ip=self.test_ip, status=200)\n self.check_response(data_queues, res, \"ok\")\n self.check_queue(data_queues, 0)", "def _ensure_dframe(self):\n if self.dframe is None:\n self.dframe = self.dataset.dframe()", "def f_empty(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def form_invalid(self, form):\n if self.unsucess_template:\n self.template_name = self.unsucess_template\n return self.render_to_response(self.get_context_data(form=form))", "def test_no_level_instruction(self):\n data = self.valid_payload\n data[\"level_instruction\"] = \"\"\n response1 = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n del data[\"level_instruction\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response1.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def handle_missing_data(self, dataframe):\n return dataframe", "def render(self):\n fmt = 'B' + 'B' * len(self.frame)\n self.sendPacket(6, struct.pack(fmt, self.start_code, *self.frame))", 
"def is_empty(self):\n return len(self.top) == 0", "def _discardCallFrame( self , fName ) :\n if fName == self.debuggerFName or self.hide:\n self.hide = self.hide + 1\n if self.hide:\n return True\n return False", "def print_empty(self):\n ...", "def root_frame(self):\r\n if not hasattr(self, '_root_frame'):\r\n self._root_frame = Frame()\r\n\r\n # define a recursive function that builds the hierarchy of frames given the\r\n # stack of frame identifiers\r\n def frame_for_stack(stack):\r\n if len(stack) == 0:\r\n return self._root_frame\r\n\r\n parent = frame_for_stack(stack[:-1])\r\n frame_name = stack[-1]\r\n\r\n if not frame_name in parent.children_dict:\r\n parent.add_child(Frame(frame_name, parent))\r\n\r\n return parent.children_dict[frame_name]\r\n\r\n for stack, self_time in self.stack_self_time.iteritems():\r\n frame_for_stack(stack).self_time = self_time\r\n\r\n return self._root_frame", "def f_is_empty(self):\n raise NotImplementedError(\"Implement this!\")", "def test_post_invalid(self):\n self.post_data['name'] = ''\n response = self._post()\n self.assertEquals(self.model.objects.count(), 0)\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('form' in response.context)\n self.assertTrue(response.context['form'].is_bound)\n self.assertFalse(response.context['form'].is_valid())", "def test_post_empty_data(self):\n response = self.app.post('/_ah/push-handlers/receive_message')\n self.assertEqual(response.status_int, 200)\n self.assertEqual(response.body, \"No request body received\")\n self.assertRaises(ValueError)", "def form_invalid(self, form, formsets):\n return self.render_to_response(\n self.get_context_data(form=form, formsets=formsets)\n )", "def test_Empty_Input(self):\n height = StringVar(self.root)\n width = StringVar(self.root)\n mines = StringVar(self.root)\n with self.assertRaises(Exception) as context:\n self.menu.createGameWindow('Custom', height, width, mines)\n self.assertTrue('Invalid data type' in str(context.exception))", "def empty(self):\n return not self.mystack1 and not self.mystack2", "def is_empty(self):\n return self.top == -1" ]
[ "0.6172748", "0.6088951", "0.6082694", "0.6078208", "0.6071998", "0.6070815", "0.604263", "0.6012433", "0.5990902", "0.5990902", "0.5970938", "0.5947907", "0.58997005", "0.58715206", "0.58715206", "0.5864916", "0.58454406", "0.58197826", "0.58059406", "0.5792144", "0.5775396", "0.57690877", "0.57690877", "0.5745798", "0.573892", "0.5702028", "0.5697813", "0.56972295", "0.56751746", "0.5668252", "0.5634032", "0.5632739", "0.5562996", "0.5500035", "0.54908293", "0.5480721", "0.54478365", "0.544494", "0.5314978", "0.5284919", "0.5258853", "0.5257186", "0.523484", "0.5212821", "0.52059233", "0.51257133", "0.51240504", "0.5115339", "0.510172", "0.50921255", "0.5081715", "0.50692034", "0.50670964", "0.5054213", "0.5037892", "0.5034534", "0.5033873", "0.50210816", "0.5006075", "0.49910122", "0.4988055", "0.4988055", "0.49839088", "0.49617463", "0.49590275", "0.49535775", "0.49364537", "0.49364537", "0.4929528", "0.49271467", "0.4926927", "0.4926927", "0.49182284", "0.49152392", "0.4913725", "0.49069482", "0.49049452", "0.49007642", "0.49003315", "0.48958954", "0.48936158", "0.4892167", "0.4881771", "0.48808527", "0.4872285", "0.48693565", "0.48653227", "0.48642772", "0.48608208", "0.4859096", "0.48587298", "0.48582774", "0.48573977", "0.48549473", "0.4854793", "0.48514155", "0.48448995", "0.48364383", "0.4835559", "0.4831494" ]
0.61049175
1
Should create an absolute path within the project.
def test_project_path(self):
    ep = exposed.ExposedProject()
    project = MagicMock()
    project.source_directory = os.path.realpath(os.path.dirname(__file__))
    ep.load(project)
    result = ep.path('hello.md')
    self.assertTrue(result.endswith('{}hello.md'.format(os.sep)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_path(self, filename):\n return os.path.join(self.root_path, filename)", "def project_root() -> Path:\n return PROJECT_ROOT", "def force_absolute(base, path):\n if os.path.abspath(path) and os.path.exists(path):\n return path\n else:\n return path_format(base + path)", "def resolve_path(self):\n # This is the fixed directory template\n out_dir = os.path.join(opts.base_dir, self.board, self.dir)\n\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n os.chdir(out_dir)", "def make_local_path(self, *args):\n return os.path.normpath(os.path.join(\n os.path.dirname(api.env.real_fabfile), *args).rstrip(os.path.sep))", "def absPath(path):\n return os.path.join(os.path.dirname(os.path.abspath(__file__)), path)", "def _abs_path(rel_path):\n return os.path.join(BASE_DIR, rel_path)", "def fake_full_path(self) -> PurePath:\n if self.category:\n # Giza wrote out yaml file artifacts under a directory. e.g. steps-foo.yaml becomes\n # steps/foo.rst\n return self.source_path.parent.joinpath(\n PurePath(self.category), self.output_filename\n )\n return self.source_path", "def test_root() -> Path:\n return TEST_ROOT", "def make_relative(self,basepath = None):\n __make_relative__(run_object=self,basepath=basepath)", "def get_absolute_path(*args):\n directory = os.path.dirname(os.path.abspath(__file__))\n return os.path.join(directory, *args)", "def path_creator(rel_path=''):\n if platform.system() != 'Windows':\n if rel_path == '':\n path_list=sys.argv[0].split('/')[:-1]\n return '/'.join(path_list)\n else:\n path_list = sys.argv[0].split('/')[:-1]\n return '/'.join(path_list) + '/' + rel_path\n else:\n if rel_path == '':\n path_list=sys.argv[0].split('\\\\')[:-1]\n path_res='\\\\'.join(path_list)\n return path_res\n else:\n path_list = sys.argv[0].split('\\\\')[:-1]\n rel_path=rel_path.split('/')\n path_res='\\\\'.join(path_list) + '\\\\' + '\\\\'.join(rel_path)\n return path_res", "def _makeAbsolute(fname):\n if fname[0] != '/':\n return os.path.join(os.getcwd(), fname)\n else:\n return fname", "def absolute(self):\n if self.relative == '':\n return self.root # don't join in this case as that appends trailing '/'\n return os.path.join(self.root, self.relative)", "def _path(self):\n if self.target[-1] != \"/\":\n self.target += \"/\"\n\n if \"/\" in self.source:\n self.path = self.target + self.source.split(\"/\")[-1]\n else:\n raise NotImplementedError(\"This software is not done for Windows\")\n if self.method == \"git\":\n self.path = self.path.replace(\".git\", \"\")", "def test_project_path(self):\n\n # Without arguments\n project_root_path = os.path.abspath(os.path.join(\n MY_DIRECTORY, '..', '..'\n ))\n self.assertEqual(project_root_path, paths.project())\n\n # With arguments\n self.assertEqual(\n MY_PATH,\n paths.project('tracksim', 'tests', 'test_tracksim.py')\n )", "def __make_path(self, filename):\n return self.__path() + os.sep + filename", "def _make_path(self) -> str:\r\n path_ = Path(path.join(conf.instance.output_path, self.path_prefix, self.name))\r\n if self.is_identifier_in_paths:\r\n path_ = path_ / self.identifier\r\n return path_", "def _abs_path(fn):\n return os.path.join(os.path.dirname(__file__), fn)", "def get_path(self):\n raise NotImplementedError(\"This asset does not support absolute paths\")", "def build_path(self, *args):\n components = self.build_config + args\n return PATH.join(\n self.name,\n *components\n )", "def build_path(path: Union[Path, str], path_is_absolute: bool = False) -> Path:\n if not path_is_absolute:\n return Path(os.getcwd()) / path\n if 
isinstance(path, str):\n return Path(path)\n return path", "def createAbsPath(self,filename):\n if \"Subfolder\" in self.basepath:\n print \"Warning !!!\\nYou provided baspath: \"+self.basepath +\"\\nThis includes /Subfolder/. You probably need to specify the path without Subfolder.\" \n return self.basepath + os.sep + filename.replace(\"\\\\\", os.sep)\n\n # TODO: currently this returns only files (relpath), everything else is ignored although messages are fully parsed\n # change such that all parsed commands are returned\n # filtering for \"relpaths\" or other commands should happen outside", "def _get_absolute(self, path: Path) -> Path:\n return path.expanduser().absolute()", "def _get_relative_path(self, abs_path):\r\n relative_path = os.path.relpath(abs_path, settings.PROJECT_ROOT)\r\n return relative_path", "def _ensure_relative_directory(self, path):\n tgt = os.path.join(os.getcwd(), path)\n try:\n os.makedirs(tgt)\n except OSError as exc: # Python >2.5\n if exc.errno == errno.EEXIST and os.path.isdir(path):\n pass\n else:\n raise\n return tgt", "def makePath(path):\n\n compatPath = os.path.abspath(os.path.expanduser(path))\n\n return compatPath", "def get_path_to(self, *args):\n return os.path.abspath(os.path.join(os.path.dirname(__file__), *args))", "def data_path(path: str, createdir: bool = False) -> str:\n path_obj = Path(path)\n if not path_obj.is_absolute():\n if inside_project():\n path_obj = Path(project_data_dir(), path)\n else:\n path_obj = Path(\".scrapy\", path)\n if createdir and not path_obj.exists():\n path_obj.mkdir(parents=True)\n return str(path_obj)", "def path(self, name):\n raise NotImplementedError(\"This backend doesn't support absolute paths.\")", "def path(self, name):\n raise NotImplementedError(\"This backend doesn't support absolute paths.\")", "def test_get_absolute_path():\n eq_(get_absolute_path(\"http://foo.com/bar/baz\", \"../foo\"), \"/bar/foo\")\n eq_(get_absolute_path(\"http://foo.com/bar/baz\", \"/foo\"), \"/foo\")", "def test_relative_paths(self):\n command_line = self._MENU + [\n \"some_pool\",\n \"../dev\",\n \"./fake\",\n \"/abc\",\n ]\n TEST_RUNNER(command_line)", "def _get_absolute_path(path):\n abs_path = Path(os.path.abspath(path))\n\n if is_protected_path(abs_path):\n raise errors.ParameterError(f\"Path '{path}' is protected.\")\n\n try:\n abs_path.relative_to(project_context.path)\n except ValueError:\n raise errors.ParameterError(f\"Path '{path}' is outside the project.\")\n\n return abs_path", "def path(self, *args, **kwds):\n def makepath(args, mkdir=True):\n path = os.path.join(self.dir, *args)\n dirname = os.path.dirname(path)\n if mkdir and not os.path.isdir(dirname):\n os.makedirs(dirname)\n return path\n return makepath(args, **kwds)", "def to_absolute_path(path):\n if not os.path.isabs(path):\n return os.path.join(os.getcwd(), path)\n else:\n return path", "def _get_path(): # THIS IS JUST FOR GETTING THE FILE\n return os.path.dirname(os.path.abspath(__file__)) + '/'", "def _make_abspath(value):\n value = value.strip()\n if not os.path.isabs(value):\n value = os.path.abspath(os.path.join(os.getcwd(), value))\n return value", "def test_resource_path(self):\n\n # Without arguments\n resources_root_path = os.path.abspath(os.path.join(\n MY_DIRECTORY, '..', '..', 'resources'\n ))\n self.assertEqual(resources_root_path, paths.resource())", "def test_make_new_dir_2(self):\n new_dir = Path(\"test_dir\")\n Path(self.base_dir, new_dir).mkdir()\n output_path = basic.make_new_dir(self.base_dir, new_dir)\n self.assertIsNone(output_path)", 
"def test_expand_path_3(self):\n partial_path = \"/fake/path\"\n input_path = \".\" + partial_path\n expanded_path = basic.expand_path(input_path)\n local_path = Path(\".\").resolve()\n expected_path = str(local_path) + partial_path\n self.assertEqual(expanded_path, expected_path)", "def get_path(self, project_file=None):\n root = os.path.abspath(\n os.path.join(os.path.dirname(__file__), '..', '..')\n )\n if project_file:\n return os.path.join(root, project_file)\n else:\n return root", "def attach_path(path):\n return os.path.join(os.path.dirname(os.path.realpath(__file__)), path)", "def test_expand_path_1(self):\n partial_path = \"/fake/path\"\n input_path = \"~\" + partial_path\n expanded_path = basic.expand_path(input_path)\n home_dir = Path(\"~\").expanduser()\n expected_path = str(home_dir) + partial_path\n self.assertEqual(expanded_path, expected_path)", "def abspath(self):\n if self.__abspath is None:\n self.__abspath = pbxpath.abspath(self)\n return self.__abspath", "def create_paths(bot_name='default'):\n REPOSITORY_PATH = Path(__file__).parent.parent.parent\n\n download_path = Path(str(REPOSITORY_PATH) + \"/source/working_directory\")\n\n download_path = str(download_path).encode(\"latin-1\").decode(\"utf-8\")\n\n if not os.path.exists(os.path.dirname(download_path)):\n os.mkdir(os.path.dirname(download_path))\n\n download_path = Path(str(REPOSITORY_PATH) + \"/source/working_directory/test.txt\")\n if not os.path.exists(os.path.dirname(download_path)):\n os.mkdir(os.path.dirname(download_path))\n\n download_path = Path(str(REPOSITORY_PATH) + \"/source/working_directory/downloads/test.txt\")\n if not os.path.exists(os.path.dirname(download_path)):\n os.mkdir(os.path.dirname(download_path))\n\n download_path = Path(str(REPOSITORY_PATH) + \"/source/working_directory/downloads/\" + bot_name + \"/test.txt\")\n if not os.path.exists(os.path.dirname(download_path)):\n os.mkdir(os.path.dirname(download_path))\n\n download_path = Path(str(REPOSITORY_PATH) + \"/source/working_directory/downloads/\" + bot_name)\n\n return download_path", "def _create_target_path(self, path):\n if not os.path.exists(path) and not self._dry_run:\n logging.debug('Creating target path: %s ...', path)\n try:\n os.makedirs(path)\n except OSError:\n raise LetMeError('Unable to create target path: %s' % path)", "def add_project_path() -> bool:\n project_path = Path('.')\n cur_path = Path(project_path.absolute())\n for parent in cur_path.parents:\n if 'Pipfile' in [obj.name for obj in parent.glob('*')]:\n project_path = Path(parent.absolute())\n break\n\n src_path = project_path.joinpath('src')\n\n if project_path == '.':\n LOGGER.warning(\"Can't find project_path\")\n return False\n\n if src_path not in sys.path:\n sys.path.append(str(src_path.absolute()))\n return project_path", "def absolute_physical_path(self) -> str:\n return self._path", "def make_path(self):\n folders = [\n f\"{self.save_path}{self.name}/json/\",\n f\"{self.save_path}{self.name}/images/\",\n ]\n if hasattr(self, \"masks\"):\n folders.append(f\"{self.save_path}{self.name}/masks/\")\n for folder in folders:\n if not os.path.exists(folder):\n os.makedirs(folder)", "def test_getLinkrelToParentDirectory(self):\n linkrel = self.builder.getLinkrel(FilePath(\"/foo\"),\n FilePath(\"/foo/bar\"))\n self.assertEquals(linkrel, \"../\")", "def test_make_new_dir_1(self):\n test_dir = Path(\"test_dir\")\n output_path = basic.make_new_dir(self.base_dir, test_dir)\n exp_dir = \"test_dir\"\n exp_path = Path(self.base_dir, exp_dir)\n with self.subTest():\n 
self.assertTrue(exp_path.is_dir())\n with self.subTest():\n self.assertEqual(exp_dir, output_path.stem)", "def path(x):\n return os.path.abspath(os.path.join(os.path.dirname(__file__), x))", "def create_dir(cls, relpath):\r\n safe_mkdir(os.path.join(cls.build_root, relpath))", "def _create_paths(self):\r\n\r\n # Copying the file 'PCU_logs.robot' to the folder with test suites.\r\n if not os.path.exists('\\\\'.join([self.path, self.log_test])):\r\n shutil.copy(self.log_test, self.path)\r\n\r\n # Moving to test suites directory\r\n os.chdir(self.path)\r\n\r\n # Create a directory for the test suite\r\n if not os.path.exists(self.output_dir_path):\r\n os.makedirs(self.output_dir_path)", "def ensure_file_abs_path_valid(file_abs_path: Text) -> Text:\n project_meta = load_project_meta(file_abs_path)\n raw_abs_file_name, file_suffix = os.path.splitext(file_abs_path)\n file_suffix = file_suffix.lower()\n\n raw_file_relative_name = convert_relative_project_root_dir(raw_abs_file_name)\n if raw_file_relative_name == \"\":\n return file_abs_path\n\n path_names = []\n for name in raw_file_relative_name.rstrip(os.sep).split(os.sep):\n\n if name[0] in string.digits:\n # ensure file name not startswith digit\n # 19 => T19, 2C => T2C\n name = f\"T{name}\"\n\n if name.startswith(\".\"):\n # avoid \".csv\" been converted to \"_csv\"\n pass\n else:\n # handle cases when directory name includes dot/hyphen/space\n name = name.replace(\" \", \"_\").replace(\".\", \"_\").replace(\"-\", \"_\")\n\n path_names.append(name)\n\n new_file_path = os.path.join(\n project_meta.RootDir, f\"{os.sep.join(path_names)}{file_suffix}\"\n )\n return new_file_path", "def get_path():\n return path.abspath(path.dirname(path.dirname(__file__)))", "def getPath(project):\n if project == '.sourglass':\n path = project\n else:\n path = os.path.join(basepath, 'logs', project + '.csv')\n try:\n open(path)\n except IOError:\n f = open(path, 'w')\n f.close()\n print(\"Started new project.\")\n return path\n else:\n return path", "def abs_path(self) -> str:\n full_path = '/'.join(folder.name for folder in reversed(self.ancestors))\n return f'/{full_path}/'", "def qualify(path):\n if not absoluteRegexp.search(path):\n path = os.path.join(cwd, path)\n return path", "def getAbsolutePath(relPath):\n currDir = os.path.dirname(__file__)\n return os.path.join(currDir, relPath)", "def BuildPath(DSLModel, table):\n folders = [\n DSLModel['GENERAL']['target_folder'],\n \"%s%s%s\" % (DSLModel['GENERAL']['target_folder'], os.sep, table['name'])\n ]\n for folder in folders:\n if not os.path.exists(folder):\n os.mkdir(folder)\n init_path = folder + os.sep + '__init__.py'\n if not os.path.exists(init_path):\n fh = open(init_path, 'w')\n fh.write('# Module Initialiation File')\n fh.close()\n TemplatesPath = \"%s%s%s%stemplates\" % (DSLModel['GENERAL']['target_folder'], os.sep, table['name'],\n os.sep)\n if not os.path.exists(TemplatesPath):\n os.mkdir(TemplatesPath)", "def get_abspath(path: str) -> str:\n if os.path.isabs(path):\n return path\n\n return os.path.join(os.path.dirname(__file__), path)", "def set_local_path(self):\n return HERE", "def cwd_in_path():\n ...", "def test_relativise_src_under():\n src = pathlib.Path(\"/tmp/foo/bar/baz/src.txt\")\n dst = pathlib.Path(\"/tmp/foo/dst.txt\")\n rel = relativise(src, dst)\n assert rel == pathlib.Path(\"../../dst.txt\")", "def test_get_pyrin_root_path():\n\n root_path = os.path.abspath('.')\n assert application_services.get_pyrin_root_path() == root_path", "def full_path(self):\n return 
os.path.abspath(self.path)", "def get_resources_abs_path() -> pathlib.Path:\n return PathManager._ROOT.joinpath(\n PathManager._TILINGS_GUI, PathManager._RESOURCES\n )", "def absolute_path(path):\n return os.path.abspath(\n os.path.join(\n os.path.dirname(__file__),\n \"..\",\n path\n )\n )", "def test_repo_relpath(self):\n from os import path\n repodir = \"~/codes/ci/tests\"\n relpath = \"../pyci/config.py\"\n result = path.expanduser(\"~/codes/ci/pyci/config.py\")\n self.assertEqual(result, get_repo_relpath(repodir, relpath))", "def _fixpath(p):\n return os.path.abspath(os.path.expanduser(p))", "def get_abs_path(path):\r\n abs_path = lib_path.abspath(path)\r\n return abs_path", "def build_path(cls, relpath):\r\n if os.path.basename(relpath).startswith('BUILD'):\r\n return relpath\r\n else:\r\n return os.path.join(relpath, 'BUILD')", "def relative_path(__file__, path):\n return os.path.abspath(os.path.join(os.path.dirname(__file__), path))", "def create_absolute_url(path: str) -> str:\n domain = settings.ALLOWED_HOSTS[0]\n return \"https://{domain}{path}\".format(domain=domain, path=path)", "def realpath(self):\n return pbxpath.realpath(self.project(), self.abspath())", "def absPath(myPath):\n try:\n # PyInstaller creates a temp folder and stores path in _MEIPASS\n base_path = sys._MEIPASS\n return os.path.join(base_path, os.path.basename(myPath))\n except Exception:\n base_path = os.path.abspath(os.path.dirname(__file__))\n return os.path.join(base_path, myPath)", "def cwd (self, path):\r\n pass", "def _path(name: str):\n return os.path.join(ASSET_PATH, name)", "def test_url_path(self):\n url = create_url(url=\"http://www.example.com\", path=\"path/to/resource\")\n self.assertEqual(url, \"http://www.example.com/path/to/resource\")", "def path(self):\n ...", "def construct_project_path(self, path):\n project_path = os.path.abspath(os.path.dirname(path))\n\n if path.find(\".py\") != -1:\n python_script = os.path.abspath(os.path.basename(path))\n else:\n python_script = None\n\n if not project_path == os.getcwd():\n project_path = os.getcwd()\n\n return project_path, python_script", "def mkpath(self, _path):\n self.mkdir(path.dirname(_path))", "def path(relative_path):\n try:\n # PyInstaller creates a temp folder and stores path in _MEIPASS\n base_path = sys._MEIPASS\n except Exception:\n base_path = os.path.abspath(\"./\")\n\n print(\"[RESOURCE]\", relative_path)\n rPath = os.path.join(base_path, relative_path)\n return rPath", "def getRootPath()->str:\n if '--develop' in sys.argv:\n return eel._get_real_path('public') + '/'\n\n return eel._get_real_path('build') + '/'", "def _absolute_root(path: _Path) -> str:\n path_ = Path(path)\n parent = path_.parent\n\n if path_.exists():\n return str(path_.resolve())\n else:\n return str(parent.resolve() / path_.name)", "def path(cls, relpath=None):\r\n base = os.getcwd() if not ParseContext._active else cls.locate().current_buildfile.parent_path\r\n return os.path.abspath(os.path.join(base, relpath) if relpath else base)", "def _ensure_path_absolute(maybe_relpath, cfg_path):\n if not isinstance(maybe_relpath, str):\n raise TypeError(\n \"Attempting to ensure non-text value is absolute path: {} ({})\".\n format(maybe_relpath, type(maybe_relpath)))\n if os.path.isabs(maybe_relpath) or is_url(maybe_relpath):\n _LOGGER.debug(\"Already absolute\")\n return maybe_relpath\n # Maybe we have env vars that make the path absolute?\n expanded = os.path.expanduser(os.path.expandvars(maybe_relpath))\n if os.path.isabs(expanded):\n _LOGGER.debug(\"Expanded: 
{}\".format(expanded))\n return expanded\n # Set path to an absolute path, relative to project config.\n config_dirpath = os.path.dirname(cfg_path)\n _LOGGER.debug(\"config_dirpath: {}\".format(config_dirpath))\n abs_path = os.path.join(config_dirpath, maybe_relpath)\n _LOGGER.debug(\"Expanded and/or made absolute: {}\".format(abs_path))\n return abs_path", "def __init__(self, path):\n self.path = os.path.abspath(path)", "def as_pathlib(self):\n return Path(self.absolute)", "def check_absolute_path(path):\n current_dir = os.getcwd()\n if os.path.isabs(path) is False:\n if str(path).startswith(\"./\"):\n return current_dir + path[1:]\n else:\n return current_dir + \"/\" + path\n else:\n return path", "def real_absolute_path(path):\n return os.path.realpath(absolute_path(path))", "def initilize(self):\n if not self.project_path.exists():\n self.project_path.mkdir()", "def get_png_abs_path() -> pathlib.Path:\n return PathManager._ROOT.joinpath(\n PathManager._TILINGS_GUI, PathManager._RESOURCES, \"img\", \"png\"\n )", "def setup_rawpath(job, raw_path):\n\n logging.info(f\"Destination is {raw_path}\")\n if not os.path.exists(raw_path):\n try:\n os.makedirs(raw_path)\n except OSError:\n err = f\"Couldn't create the base file path: {raw_path}. Probably a permissions error\"\n logging.error(err)\n else:\n logging.info(f\"{raw_path} exists. Adding timestamp.\")\n raw_path = os.path.join(str(job.config.RAW_PATH), f\"{job.title}_{job.stage}\")\n logging.info(f\"raw_path is {raw_path}\")\n try:\n os.makedirs(raw_path)\n except OSError:\n err = f\"Couldn't create the base file path: {raw_path}. Probably a permissions error\"\n raise OSError(err) from OSError\n return raw_path", "def get_project_path():\n file_path = os.path.abspath(__file__)\n root_dir = os.path.join(os.path.dirname(file_path), \"..\")\n return os.path.abspath(root_dir)", "def get_absolute_pathname(self):\n return os.path.join(settings.PRIVATE_STORAGE_ROOT, self.get_relative_pathname())", "def _branchPath(self, path):\n assert self.branch_dir is not None\n return os.path.join(self.branch_dir, path)", "def _absPath(self, relpath):\n\n # Pass through URIs and absolute paths.\n if self.isUrl(relpath) or relpath[0] == '/':\n return relpath\n\n # This won't deal with ~user/ syntax, but it's much less\n # common anyway.\n if relpath.startswith('~/') and 'HOME' in os.environ:\n return os.path.join(os.environ['HOME'], relpath[2:])\n\n if self._configFileStack:\n relativeTo = os.path.dirname(self._configFileStack[-1])\n else:\n relativeTo = os.getcwd()\n\n if self.isUrl(relativeTo):\n parts = urlparse.urlsplit(relativeTo)\n return urlparse.urlunsplit((parts.scheme, parts.netloc, os.path.normpath(os.path.join(parts.path, relpath)), parts.query, parts.fragment))\n return os.path.normpath(os.path.join(relativeTo, relpath))" ]
[ "0.7043294", "0.6796717", "0.679323", "0.676385", "0.67596847", "0.66949475", "0.6657503", "0.6627231", "0.66117865", "0.65952295", "0.65799236", "0.65615535", "0.64732045", "0.6427088", "0.642576", "0.64075583", "0.6396847", "0.6347354", "0.6342947", "0.632171", "0.63044393", "0.62677604", "0.6256172", "0.6254364", "0.62416404", "0.6239738", "0.62363076", "0.62349695", "0.61654544", "0.6165178", "0.6165178", "0.61534286", "0.6144769", "0.6139222", "0.61372805", "0.6133709", "0.61133265", "0.6112703", "0.6106124", "0.60932946", "0.6091859", "0.6085142", "0.60821337", "0.60795265", "0.60639703", "0.6056278", "0.6052574", "0.60519075", "0.6038936", "0.6033218", "0.5993724", "0.59895194", "0.5985216", "0.59841305", "0.59836566", "0.59697545", "0.596851", "0.59636647", "0.5954772", "0.5954691", "0.5944603", "0.5941208", "0.59351814", "0.59274775", "0.59221005", "0.59202945", "0.5908743", "0.5901213", "0.5899109", "0.5897437", "0.5889786", "0.58875245", "0.58867115", "0.58854014", "0.5883645", "0.58813363", "0.58730996", "0.5868569", "0.58668625", "0.58572716", "0.5855669", "0.5852309", "0.5851534", "0.5850125", "0.5848879", "0.58449566", "0.5841903", "0.58393633", "0.5836181", "0.5831807", "0.5819419", "0.581832", "0.5810281", "0.58075494", "0.5797606", "0.5793906", "0.5784422", "0.5772758", "0.57715493", "0.57612365" ]
0.6755603
5
Open the passed file in read-binary mode and read the binary contents into a string.
def openAndPack(filename):
    inputfile = open(filename, 'rb')
    return inputfile.read()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_file_bin_readwrite(self):\n FileWriter(self.binary_path).write_bin(self.binary_string)\n bin_data = FileReader(self.binary_path).read_bin()\n self.assertEqual(bin_data, self.binary_string)", "def get_binary(self, filepath):\n with open(filepath, \"rb\") as f:\n return b64encode(f.read()).decode('utf-8')", "def get_binary(fname):\n with open(fname, 'rb+') as f_name:\n data = f_name.read()\n return data", "def read_binary(self):\n with self.open(\"rb\") as f:\n return f.read()", "def open_binary_file(file_name: str, mode: str = 'rb'):\n return open(os.path.join(DATA_FOLDER, file_name), mode=mode)", "def write_binary(self, path):\n return", "def _save_binary(file_name, data):\n with open(file_name, \"wb\") as f:\n cp.dump(data, f)", "def write_to_binary_file(self, filename):\n\n self.octree.writeBinary(str.encode(filename))", "def test_file_bin_read_unicode_as_bin(self):\n FileWriter(self.unicode_path).write_utf8(self.unicode_string)\n bin_data = FileReader(self.unicode_path).read_bin() #read unicode file as binary\n uni_text = bin_data.decode(\"utf-8\") #decode to utf-8\n self.assertEqual(uni_text, self.unicode_string)", "def write_to_file(original_path, new_path):\n print(f\"[INFO]: Transform data from binary to text file {new_path}\")\n with open(new_path, mode='wt', encoding='utf-8') as new_file:\n with open(original_path, mode='rb') as original_file:\n for line in original_file:\n new_file.write(line.decode())", "def ReadBinaryFile(name):\n\n try:\n fBinary = open(name, 'rb')\n except:\n return None\n try:\n content = fBinary.read()\n except:\n return None\n finally:\n fBinary.close()\n return content", "def read_file_bytes(read_file_path):\n file_string = \"\"\n with open(read_file_path, \"r\") as f:\n for line in f:\n file_string += line\n return bytes(file_string, \"UTF-8\")", "def open_file():\n \n # Open file for binary reading.\n openlocation=fd.askopenfilename()\n with open(openlocation, mode='rb') as file: \n # Read file as bytes held in ints.\n \n fileContent = file.read()\n \n text_store = \"\" # Text that will ultimately be displayed.\n \n for i in fileContent:\n \n b_int = bin(i) # Turn the byte to a string representation of the binary.\n b_int = b_int[2:] # Remove \"0b\" binary marker added by the bin cast. 
\n \n \n while ((len(b_int)) < 8):\n # As bytes read are truncated to most significant bit, add \n # zeros where needed to make up a byte.\n b_int = \"0\" + b_int\n \n text_store = text_store + b_int\n \n text.delete('1.0', tk.END) # Clear any previous text.\n text.insert(tk.END, text_store) # Add new text.", "def write_bin(file, binary, buffer=None, append=True):\n\n # Get current stream, default or not.\n stream = cp.cuda.get_current_stream()\n\n if buffer is None:\n buffer = cp.asnumpy(binary)\n else:\n binary.get(out=buffer)\n\n if append is True:\n mode = \"ab\"\n else:\n mode = \"wb\"\n\n with open(file, mode) as f:\n stream.synchronize()\n buffer.tofile(f)", "def load(path, binary=False):\n with open(path, 'rb') as handle:\n tmp = handle.read()\n return tmp if binary else tmp.decode()", "def test_binary_contents(file, tmp_path):\n name = tmp_path / \"1px.gif\"\n ret = file.managed(name=str(name), contents=BINARY_FILE)\n assert ret.result is True", "def write_object_file_to_file(self, file_name):\n with open(file_name, 'wb+') as file:\n file.write(self.object_file.to_binary_array())", "def saveBinDataToFile(binaryData, fileToSave):\n saveOK = False\n try:\n # open a file, if not exist, create it\n savedBinFile = open(fileToSave, \"wb\")\n #print \"savedBinFile=\",savedBinFile\n savedBinFile.write(binaryData)\n savedBinFile.close()\n saveOK = True\n except :\n saveOK = False\n return saveOK", "def local_file_as_string(self, file_path):\n with open(file_path, 'rb') as file:\n string = file.read().decode('utf-8')\n return string", "def read_binary(filename):\r\n with open('%s.pickle'%(filename, ), 'rb') as handle:\r\n b = pickle.load(handle)\r\n return b", "def read_file_bytes(filepath: str):\n with open(filepath, \"rb\") as reader:\n return reader.read()", "def testWriteBinaryData(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteBinaryData(b'Binary data')\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = b'Binary data'\n self.assertEqual(output_data, expected_output_data)", "def file2str(file):\n with open(file, \"r\") as textFile:\n return textFile.read()", "def write_content_file(file_content):\n\n f = open(workingfile, \"w\")\n if hasattr(file_content, \"decode\"):\n f.write(str(file_content.decode(\"utf-8\")))\n else:\n f.write(str(file_content))\n f.close()", "def saveDataToFile(fullFilename, binaryData):\n with open(fullFilename, 'wb') as fp:\n fp.write(binaryData)\n fp.close()\n # logging.debug(\"Complete save file %s\", fullFilename)", "def file_to_string(path_to_file):\n\t\twith open(path_to_file, 'r') as f:\n\t\t\tcontent = f.read()\n\t\treturn content", "def _load_binary(file_name):\n try:\n with open(file_name, 'rb') as f:\n return cp.load(f)\n except UnicodeDecodeError: # When loading Python 2 pickle from Python 3\n with open(file_name, 'rb') as f:\n return cp.load(f, encoding=\"latin1\")", "def write_file_content_from_base64(file_path: str, data_base64: str) -> None:\n file = open(file_path, \"wb\")\n file.write(base64.b64decode(data_base64))\n file.close()", "def ReadFile(fname, binary=True):\n with open(Filename(fname), binary and 'rb' or 'r') as fd:\n data = fd.read()\n #self._out.Info(\"Read file '%s' size %d (%#0x)\" %\n #(fname, len(data), len(data)))\n return data", "def receiveAndUnpack(binary, filename):\n inputfile = open(filename, 'wb')\n inputfile.write(binary)", "def write_file(bytes, path: str):\n\n newFile = open(path + \".rex\", \"wb\")\n 
newFile.write(bytes)\n return True", "def load_file(file_path: str, mode: str = \"rb\") -> Union[str, bytes]:\n with open(file_path, mode) as file:\n return file.read()", "def file_to_string(file_name):\n with open(file_name, 'r') as f:\n text = f.read()\n # delete original file\n os.remove(file_name)\n return text", "def fileread(self, filename):\n data = None\n f = open(filename, 'r')\n data = f.read()\n f.close()\n try:\n data = data.decode()\n except (UnicodeDecodeError, AttributeError):\n data = data.encode(\"utf-8\")\n\n return data", "def bin_writer(fpath, fname, data):\n path = fpath + fname + '.dat'\n with open(path, 'ab') as file:\n for row in data:\n file.write(row.encode('utf-8'))\n return None", "async def async_write_bin(path, binary):\n async with aiofiles.open(path, \"wb\") as f:\n await f.write(binary)", "def _open_file_binary(filename):\n for _byte in open(filename, 'rb').read():\n yield _byte", "def string_to_file(path_to_file, string_to_write):\n\t\twith open(path_to_file, 'w+') as f:\n\t\t\tf.write(string_to_write)", "def write_file(filename, string):\n import sys\n #ugly fix, hopefully we can find a better one\n if sys.version_info[0] >= 3:\n with open(filename, 'w', encoding=\"utf-8\") as f:\n f.write(string)\n else:\n with open(filename, 'w') as f:\n f.write(string.encode(\"utf-8\"))", "def pickle_binary(data: object, file: Union[str, Path]) -> None:\n\n with open(str(file), 'wb') as f:\n pickle.dump(data, f)", "def open_file(self, filename):\n # UnicodeDecodeError: 'utf-8' codec can't decode byte 0x89 in position 0: invalid start byte\n #\n # https://docs.python.org/3/library/functions.html#open\n # 'r'\topen for reading (default)\n # 't'\ttext mode (default)\n # 'b'\tbinary mode\n #\n # In text mode (the default, or when 't' is included in the mode argument),\n # the contents of the file are returned as str,\n # the bytes having been first decoded using a platform-dependent encoding or using the specified encoding if given.\n with open(filename, 'rb') as f:\n return f.read()", "def write_binary(self, data, ensure=False):\n if ensure:\n self.dirpath().ensure(dir=1)\n with self.open(\"wb\") as f:\n f.write(data)", "def write_raw(content, file_path):\n file = open(file_path, 'wb')\n file.write(content)\n file.close()", "def write_bytes_to_file(bytes, filename):\n try:\n with open(filename, mode=\"bx\") as file:\n file.write(bytes)\n except FileExistsError:\n os.remove(filename)\n ResourceHandler.write_bytes_to_file(bytes, filename)\n except Exception as e:\n print(e)", "def write_to_file(file: Text, data: bytes):\n with open(file, \"wb\") as w:\n w.write(data)\n w.flush()", "def file_to_str(fname):\n data = None\n # rU = read with Universal line terminator\n with open(fname, 'rU') as f:\n data = f.read()\n return data", "def write_file(name_file, string):\n with open(name_file, 'w') as file:\n file.write(string)", "def test_file_ascii_readwrite(self):\n FileWriter(self.ascii_path).write(self.ascii_string) # file write\n ascii_text = FileReader(self.ascii_path).read() # file read\n self.assertEqual(ascii_text, self.ascii_string)", "def binary_write(iring, file_ext='out', *args, **kwargs):\n return BinaryFileWriteBlock(iring, file_ext, *args, **kwargs)", "def init_bit_string(self, path: str):\n size = 0\n with open(path, 'rb') as file:\n self.bit_string = \"\"\n byte = file.read(1)\n\n # convert bytes to bits\n while (len(byte) > 0):\n byte = ord(byte)\n bits = bin(byte)[2:].rjust(8, '0')\n self.bit_string += bits\n byte = file.read(1)\n size += 1", "def 
WriteToFile(msg, file_name):\r\n out_msg = str(msg)\r\n file = open(file_name, \"w\")\r\n file.write(str(decoded_msg))", "def read_as_bytes(filename):\n try:\n with open(filename, \"rb\") as file:\n bytes = array.array(\"B\")\n bytes.frombytes(file.read())\n return bytes\n except FileNotFoundError:\n print(f\"File not found: {filename}\")\n exit()", "def write(self, string):\n self.__file.write(string)", "def open_and_encode_file(db, user_name, file_path):\n file_name = db.get_file_name(user_name)\n\n with open(file_path, \"rb\") as fp:\n bts = fp.read()\n b64_bytes = base64.b64encode(bts).decode(\"utf-8\")\n response = {\n \"status\": \"SUCCESS\",\n \"file_name\": file_name,\n \"data\": b64_bytes\n }\n return response", "def get_file(self, size):\n file = open(self.FILENAME, \"w\")\n file.seek(1024 * 1024 * size)\n file.write('\\x00')\n file.close()", "def _write_to_file(self, string):\n with open(self.p.base_dir + '/' + self.p.filename, 'w') as f:\n f.write(string)", "def WriteStringToFile(string, filepath):\n with open(filepath, 'w') as file_handle:\n file_handle.write(string)", "def write(self, file, endian=None):\n if endian is None:\n endian = self.endian\n for item in BINARY_FILE_HEADER_FORMAT:\n length, name, _ = item\n # Unpack according to different lengths.\n if length == 2:\n format = ('%sh' % endian).encode('ascii', 'strict')\n # Write to file.\n file.write(pack(format, getattr(self, name)))\n # Update: Seems to be correct. Two's complement integers seem to be\n # the common way to store integer values.\n elif length == 4:\n format = ('%si' % endian).encode('ascii', 'strict')\n # Write to file.\n file.write(pack(format, getattr(self, name)))\n # These are the two unassigned values in the binary file header.\n elif name.startswith('unassigned'):\n temp = getattr(self, name)\n if not isinstance(temp, bytes):\n temp = str(temp).encode('ascii', 'strict')\n temp_length = len(temp)\n # Pad to desired length if necessary.\n if temp_length != length:\n temp += b'\\x00' * (length - temp_length)\n file.write(temp)\n # Should not happen.\n else:\n raise Exception", "def fread(filename):\n with open(filename, 'r') as f:\n return f.read()", "def get_string(binary):\r\n new_string = \"\"\r\n\r\n # Sets range as length of binary string and returns an int\r\n for x in range((len(binary) // 8)):\r\n # Grabs 8 characters at a time, converts back to an integer\r\n n = int(binary[(x * 8) : ((x * 8) + 8)], 2)\r\n # Special logic to handle null values\r\n if n == 0:\r\n new_string += \"\\\\x00\"\r\n # Otherwise, change those bits back to a character\r\n else:\r\n new_string += n.to_bytes((n.bit_length() + 7) // 8, \"big\").decode()\r\n\r\n return new_string", "def test_file(tmpdir):\n file_path = tmpdir / 'test.txt'\n file_path = file_path.write_binary(b'This is some test data!')\n return file_path", "def read_raw(file_path):\n file = open(file_path, 'rb')\n content = file.read()\n file.close()\n return content", "def read_binary(self):\n length = self.read_uint32()\n bytes = self.data[:length]\n self.data = self.data[length:]\n return bytes", "def base64(path, filename):\n print(uc.base64(path, filename))", "def _binary_string_to_str(binary_string: str, end=None) -> str:\n string = \"\"\n\n binary_list = re.findall(\".\" * 8, binary_string)\n for byte in binary_list:\n string += chr(int(byte, 2))\n if end and string.endswith(end):\n return string[: -len(end)]\n\n return string", "def get_file(self, filename: str, directory: str = 'gcodes', binary: bool = False) -> str:\n raise 
NotImplementedError", "def save_text_file(i):\n\n fn = i['text_file']\n\n s = i['string']\n\n try:\n s = s.replace('\\r', '')\n except Exception as e:\n pass\n\n try:\n s = s.replace(b'\\r', b'')\n except Exception as e:\n pass\n\n m = 'w'\n if i.get('append', '') == 'yes':\n m = 'a'\n\n try:\n s = s.encode('utf8')\n except Exception as e:\n pass\n\n try:\n # if sys.version_info[0]>2:\n # f=open(fn, m+'b')\n # f.write(s)\n # else:\n f = open(fn, m+'b')\n f.write(s)\n except Exception as e:\n return {'return': 1, 'error': 'problem writing text file='+fn+' ('+format(e)+')'}\n\n f.close()\n\n return {'return': 0}", "def openfile(path:str) -> str:\n with open(file=path, mode='br') as file:\n r_0 = file.readline()\n return str(r_0)", "def save_file(file_bytes: Union[bytes, bytearray],\n save_to_path: str,\n mode: str = \"wb\"):\n with open(save_to_path, mode) as file:\n file.write(file_bytes)", "def str2file(s, f, mode=\"w\"):\n with open(f, mode=mode) as fileobj:\n fileobj.write(s)", "def write_cipher_text(self, data: bytes, out_file: BinaryIO, filename: str):\n\n out_file.write(data)", "def _ReadFile(filepath):\n with open(filepath) as f:\n return f.read()", "def file_as_base64(path):\n with open(path, \"rb\") as file:\n return base64.b64encode(file.read())", "def _read_file(self) -> str:\n with open(self._file_name) as fp:\n return fp.read()", "def openR(fname):\n f=open(fname, 'rb'); a=f.read(3); f.close()\n if a==b'\\xef\\xbb\\xbf': f=open(fname,'r', encoding='utf-8'); f.read(1);\n elif a[:2]==b'\\xff\\xfe' or a[:2]==b'\\xfe\\xff': f = open(fname,'r',encoding='utf16')\n else : f = open(fname,'r')\n return f", "def _get_sample_binary() -> bytearray:\n full_path = path.abspath(__file__).replace(\n DemoBinaryPuller._MODULE_FILE_NAME,\n DemoBinaryPuller._BIN_FILE_NAME)\n with open(full_path, \"rb\") as bin_file:\n binary_content = bin_file.read()\n return binary_content", "def _make_string(self, filename):\n\n if not os.path.isfile(filename):\n str = \"ERROR: Could not find specified XML file %s.\" % filename\n PRINT.info(str)\n raise OSError(str)\n\n return open(filename).read()", "def write_to_file(filepath, data):\n\n with open(filepath, 'w') as f:\n f.write(str(data))", "def to_bytes(cls, pki_type, item_id, file_name):\n with open(file_name, \"rb\") as _file:\n pki_raw_data = _file.read()\n\n data_out = LEUnsigned.pack(cls.TYPE[pki_type], 1) # type\n data_out += LEUnsigned.pack(item_id, 1) # ID\n data_out += LEUnsigned.pack(len(pki_raw_data), 2) # content len\n data_out += pki_raw_data # raw data\n data_out += chr(0xFF) * ((4 - (len(pki_raw_data) % 4)) % 4) # padding\n return data_out", "def open_and_read_file(file_path):\n\n # Open file and read into memory\n text = open(file_path).read().rstrip()\n\n # Replace newlines with space\n #text = text.replace('\\n', ' ')\n\n return text", "def read_file_content_as_base64(file_path: str) -> str:\n file = open(file_path, \"rb\")\n image_base64 = base64.b64encode(file.read()).decode(\"utf-8\")\n file.close()\n\n return image_base64", "def getFileContent(self, filePath, mode):\n with open(filePath, mode) as my_file:\n return my_file.read()", "def write (self, file):\n\t\tfile.write (self.pack ())", "def to_file(self, data, file, pubkey_id):\n must_close = False\n if isinstance(file, str):\n try:\n file = open(file, \"wb\")\n except PermissionError as e:\n raise GPG.EncryptionException(str(e))\n\n result = subprocess.run(\n [GPG.bin, \"--encrypt\", \"-r\", pubkey_id],\n input=data,\n stdout=file,\n stderr=subprocess.PIPE\n )\n if must_close:\n 
file.close()\n if result.returncode == 0:\n # It was successful\n return\n else:\n raise GPG.EncryptionException(result.stderr)", "def read_file(name):\n with open(name, 'r') as my_file:\n return my_file.read().encode('utf-8')", "def filewrite(self, filename, data):\n try:\n filedata = data.decode(\"utf-8\")\n except Exception:\n filedata = data\n lock = FileLock(filename)\n lock.acquire()\n with open(filename, 'w+') as f:\n f.write(filedata)\n lock.release()", "def open_and_read_file(file_path):\n\n # your code goes here\n text_file = open(file_path)\n text_string= text_file.read()\n text_file.close()\n return text_string", "def bbl_file(self, base_file):\n bbl_path = os.path.abspath(os.path.splitext(base_file)[0]) + '.bbl'\n return self.open_encode_safe(bbl_path).readlines()", "def readfile(fname, mode='rb'):\n f = open(fname, mode)\n raw = f.read()\n f.close()\n return raw", "def save_to(self, f: BinaryIO):\n raise NotImplementedError", "def write(self, filename):\n bvh_string = self.generate_bvh_string()\n if filename[-4:] == '.bvh':\n filename = filename\n else:\n filename = filename + '.bvh'\n with open(filename, 'w') as outfile:\n outfile.write(bvh_string)", "def write_to_binary_file(self, loc: str, data: bytes):\n try:\n os.mkdir(\"../\" + self.uri)\n except FileExistsError:\n pass\n\n f = open(\"../\" + self.uri + loc, \"wb\")\n f.write(data)\n print(\"[WRITE] written to binary file loc\")\n f.close()", "def write(self, binary_log: BinaryLoggable) -> None:\n if binary_log == None:\n return #possibly raise exception\n record_array = binary_log.to_bytes()\n record_len = len(record_array)\n if record_len == 0:\n return #possibly raise exception\n\n log_name = type(binary_log).__name__\n self.file_map.setdefault(log_name, [])\n\n # Writes log_name size and log_name to the end of file\n self.bfile.seek(0,os.SEEK_END)\n self.bfile.write(len(log_name).to_bytes(self.IntLength, byteorder='big'))\n self.bfile.write(bytearray(log_name, self.Encoding))\n\n # Write byte_array size and byte array\n self.bfile.write(record_len.to_bytes(self.IntLength, byteorder='big'))\n self.file_map[log_name].append([self.bfile.tell(),record_len])\n self.bfile.write(record_array)", "def _binary_to_string_handle(handle):\n try:\n # If this is a network handle from urllib,\n # the HTTP headers may tell us the encoding.\n encoding = handle.headers.get_content_charset()\n except AttributeError:\n encoding = None\n if encoding is None:\n # The W3C recommendation is:\n # When no explicit charset parameter is provided by the sender,\n # media subtypes of the \"text\" type are defined to have a default\n # charset value of \"ISO-8859-1\" when received via HTTP.\n # \"ISO-8859-1\" is also known as 'latin-1'\n # See the following for more detail:\n # https://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1\n encoding = \"latin-1\"\n wrapped = io.TextIOWrapper(io.BufferedReader(handle), encoding=encoding)\n try:\n # If wrapping an online handle, this is nice to have:\n wrapped.url = handle.url\n except AttributeError:\n pass\n return wrapped", "def read_file(self, file: Path) -> str:\n with open(file) as f:\n return f.read()", "def write_to_file(file_name, content):\n with open(file_name, \"w\") as text_file:\n text_file.write(str(content))", "def verify_fileobj(fileobj, writable=False):\n\n try:\n data = fileobj.read(0)\n except Exception:\n if not hasattr(fileobj, \"read\"):\n raise ValueError(\"%r not a valid file object\" % fileobj)\n raise ValueError(\"Can't read from file object %r\" % fileobj)\n\n if not 
isinstance(data, bytes):\n raise ValueError(\n \"file object %r not opened in binary mode\" % fileobj)\n\n if writable:\n try:\n fileobj.write(b\"\")\n except Exception:\n if not hasattr(fileobj, \"write\"):\n raise ValueError(\"%r not a valid file object\" % fileobj)\n raise ValueError(\"Can't write to file object %r\" % fileobj)", "def file_open(self, path, method='r', content=''):\n\n f = open(path, method)\n if method == 'r' or method ==\"rb\": \n data = f.read()\n f.close()\n return data\n elif method == 'w' or method == 'a' or method == \"wb\":\n f.write(content)\n f.close()", "def test_get_file_binary_content(self):\n content = image_helper.get_file_binary_content(self.subject)\n\n self.assertGreater(len(content), 0)\n\n with open(self.subject, \"rb\") as f:\n original_content = f.read()\n\n self.assertEqual(content, original_content)", "def tofileobj(self, fileobj):\n # File name, 16 bytes\n name = self.name.encode('utf-8')\n if self.format is AR_FORMAT_SIMPLE:\n assert len(name) < 16\n fileobj.write('%-16s' % name)\n datasize = self.size\n elif self.format is AR_FORMAT_BSD:\n fileobj.write('#1/%-13s' % str(len(name)))\n datasize = self.size + len(name)\n\n # Modtime, 12 bytes\n fileobj.write('%-12i' % self.mtime)\n # Owner ID, 6 bytes\n fileobj.write('%-6i' % self.uid)\n # Group ID, 6 bytes\n fileobj.write('%-6i' % self.gid)\n # File mode, 8 bytes\n fileobj.write('%-8o' % self.mode)\n # File size, 10 bytes\n fileobj.write('%-10s' % datasize)\n # File magic, 2 bytes\n fileobj.write(AR_MAGIC_BIT)\n\n # Filename - BSD variant\n if self.format is AR_FORMAT_BSD:\n fileobj.write(name)" ]
[ "0.69785064", "0.684996", "0.65437925", "0.6293273", "0.62410367", "0.62361705", "0.61142075", "0.6071782", "0.6044688", "0.5978211", "0.59639037", "0.5924334", "0.5889725", "0.5753386", "0.5673814", "0.5615943", "0.5607474", "0.5558819", "0.55509293", "0.5521247", "0.5515765", "0.5493199", "0.5488867", "0.5482152", "0.5473375", "0.546671", "0.5456874", "0.545228", "0.54330564", "0.5408977", "0.5402464", "0.53781146", "0.5375507", "0.536525", "0.53596723", "0.5351057", "0.5347545", "0.5340135", "0.53206366", "0.53123057", "0.5295335", "0.5293614", "0.5293559", "0.5284031", "0.525571", "0.52538776", "0.5243629", "0.5243407", "0.5211798", "0.52065295", "0.51949495", "0.51944834", "0.51815796", "0.5178353", "0.51724964", "0.5163187", "0.51613915", "0.515152", "0.5151369", "0.5149821", "0.5130709", "0.511692", "0.51130897", "0.51093227", "0.51057786", "0.51051134", "0.5089299", "0.5086639", "0.5086507", "0.50864875", "0.5082824", "0.5076659", "0.50736064", "0.5067562", "0.50518024", "0.5048576", "0.50451905", "0.5043972", "0.50434244", "0.5034511", "0.5013791", "0.5009469", "0.5004973", "0.5002003", "0.49914575", "0.4989173", "0.49841648", "0.4983163", "0.4980862", "0.4980115", "0.49800855", "0.49781162", "0.4961514", "0.49554464", "0.4950789", "0.49506924", "0.49404576", "0.49373558", "0.4926167", "0.4920714" ]
0.5054163
74
Open a file to write to and write the input binary to the new file
def receiveAndUnpack(binary, filename):
    # Open the target file in binary-write mode and write the payload.
    inputfile = open(filename, 'wb')
    inputfile.write(binary)
    inputfile.close()  # close explicitly so the bytes are flushed to disk
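A minimal usage sketch for the snippet above; the payload bytes and the file name are illustrative assumptions, not part of the dataset record:

    # Hypothetical caller: persist a received byte payload, then verify it.
    payload = b'\x00\x01 example binary payload'
    receiveAndUnpack(payload, 'received.bin')
    with open('received.bin', 'rb') as f:
        assert f.read() == payload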
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_to_file(original_path, new_path):\n print(f\"[INFO]: Transform data from binary to text file {new_path}\")\n with open(new_path, mode='wt', encoding='utf-8') as new_file:\n with open(original_path, mode='rb') as original_file:\n for line in original_file:\n new_file.write(line.decode())", "def write_binary(self, path):\n return", "def write_object_file_to_file(self, file_name):\n with open(file_name, 'wb+') as file:\n file.write(self.object_file.to_binary_array())", "def write (self, file):\n\t\tfile.write (self.pack ())", "def write_file(self):\n file = open(self.__file_path, 'w+')\n file.truncate(0)\n file.write(self.__content)\n file.close()", "def open_file(self):\n self.file = open(os.path.join(self.__output_dir,\n self.__file_name), 'a+')", "def _save_binary(file_name, data):\n with open(file_name, \"wb\") as f:\n cp.dump(data, f)", "def write_to_binary_file(self, filename):\n\n self.octree.writeBinary(str.encode(filename))", "def create(self):\n self.file = open(self.filename, \"xb\", buffering=self.bufferSize)", "def filewrite(self, filename, data):\n try:\n filedata = data.decode(\"utf-8\")\n except Exception:\n filedata = data\n lock = FileLock(filename)\n lock.acquire()\n with open(filename, 'w+') as f:\n f.write(filedata)\n lock.release()", "def save_to(self, f: BinaryIO):\n raise NotImplementedError", "def _Write(buf, filename):\n with open(filename, 'wb') as f:\n f.write(buf)", "def write(self, filename):\n pass", "def write(self, filename):\n pass", "def saveFile(self,newfile=None):\n if newfile == None:\n shutil.move(self.filename,self.filename+'~')\n self.handler = open(self.filename,'w')\n else:\n self.handler = open(newfile,'w')\n self.handler.writelines(self.content)\n self.handler.close()", "def write_to_file(file: Text, data: bytes):\n with open(file, \"wb\") as w:\n w.write(data)\n w.flush()", "def write_to_file(self, filename: str) -> None:", "def write_file(self):\n\n class ProtocolFile(object):\n def __init__(self, proto):\n self._proto = proto\n self._offset = 0\n\n def write(self, data):\n self._proto.write(data)\n self._offset += len(data)\n\n def tell(self):\n return self._offset\n\n def close(self):\n pass\n\n return ProtocolFile(self)", "def FileWrite(offset, buf):\r\n return _hiew.HiewGate_FileWrite(offset, buf)", "def WriteFilename(self):\n print(f\"Copying {self.input_file} to {self.output_file}\")\n copyfile(self.input_file, self.output_file)\n # Open r+b to open as binary for writing to\n with open(self.output_file, \"r+b\") as fh:\n seeker = (self.root_directory_offset*self.sector_size)+((self.index_number-1)*self.directory_index_size)\n # Convert to little-endian\n f_array = bytearray()\n print(f\"Reversing {self.filename}\")\n f_array.extend(map(ord, self.filename))\n #f_array.reverse()\n print(f\"f_array is {f_array}\")\n print(f\"Preparing to write {f_array} to {seeker}\")\n fh.seek(seeker)\n fh.write(f_array)\n e_array = bytearray()\n print(f\"Reversing {self.extension}\")\n e_array.extend(map(ord, self.extension))\n #e_array.reverse()\n print(f\"e_array is {e_array}\")\n print(f\"Preparing to write {e_array} to {seeker}\")\n fh.seek(seeker+8)\n fh.write(e_array)\n print(\"Filename and extension written to root directory\")\n return True", "def single_file_write(self, file_pointer, filename):\n temp_file = \"resources/temp_file\"\n\n file_pointer.seek(0)\n with open(temp_file, \"wb\") as output_file:\n shutil.copyfileobj(file_pointer, output_file)\n\n os.rename(temp_file, filename)\n log.info(\"Saved file: %s\", filename)", "def 
_update_ondisk(self):\n with open(self.orig_path, \"w\") as f:\n f.write(self.content)", "def filewrite(self, filename):\n io.write(self, filename)", "def write_file(bytes, path: str):\n\n newFile = open(path + \".rex\", \"wb\")\n newFile.write(bytes)\n return True", "def writeimage(self, fp):\n execfile = open(self.binpath, \"w\")\n databuf = fp.read(4096)\n while databuf:\n execfile.write(databuf)\n databuf = fp.read(4096)\n execfile.flush()\n execfile.close()\n os.chmod(self.binpath, stat.S_IRWXU)", "def write_output_file(updated_file, file_path):\n orig_file = file_path + \".orig\"\n # remove an existion .orig file\n if os.path.isfile(orig_file):\n os.remove(orig_file)\n # rename the current file\n os.rename(file_path, orig_file)\n # write the new file\n with open(file_path, mode='w', encoding='utf-8', newline='') as file_out:\n for line in updated_file:\n file_out.write(line)", "def write(self, fname):\n pass", "def write(cls, file, data):\n file.write(data)", "def write_bin(file, binary, buffer=None, append=True):\n\n # Get current stream, default or not.\n stream = cp.cuda.get_current_stream()\n\n if buffer is None:\n buffer = cp.asnumpy(binary)\n else:\n binary.get(out=buffer)\n\n if append is True:\n mode = \"ab\"\n else:\n mode = \"wb\"\n\n with open(file, mode) as f:\n stream.synchronize()\n buffer.tofile(f)", "def write(self, fileW):\n fileW.wByte(self.b)\n fileW.wByte(self.g)\n fileW.wByte(self.r)\n fileW.wByte(self.a)", "def writefile(name, instream, start=None, end=None, append=False):", "def get_file(self, size):\n file = open(self.FILENAME, \"w\")\n file.seek(1024 * 1024 * size)\n file.write('\\x00')\n file.close()", "def test_file_bin_readwrite(self):\n FileWriter(self.binary_path).write_bin(self.binary_string)\n bin_data = FileReader(self.binary_path).read_bin()\n self.assertEqual(bin_data, self.binary_string)", "def write(self, data_to_write):\n self.single_file.write(data_to_write)\n self.single_file.flush()", "def binary_write(iring, file_ext='out', *args, **kwargs):\n return BinaryFileWriteBlock(iring, file_ext, *args, **kwargs)", "def WriteFile(fname, data):\n #self._out.Info(\"Write file '%s' size %d (%#0x)\" %\n #(fname, len(data), len(data)))\n with open(Filename(fname), 'wb') as fd:\n fd.write(data)", "def write_bytes_to_file(bytes, filename):\n try:\n with open(filename, mode=\"bx\") as file:\n file.write(bytes)\n except FileExistsError:\n os.remove(filename)\n ResourceHandler.write_bytes_to_file(bytes, filename)\n except Exception as e:\n print(e)", "def open(self):\n self.file = open(self.filename, \"rb\", buffering=self.bufferSize)", "def write_file(self):\n print 'Writing '+self.name+' binary...'\n if self.vals is not None:\n if len(self.vals) == self.size:\n stream = self.pack_mem()\n with open(self.name+'.bin','wb') as f:\n f.write(stream)\n print 'File written: '+self.name+'.bin'\n else:\n print 'Error: input array for '+self.name+'is not the right '+\\\n 'size (should be '+str(self.size)+'). 
Skipping.'\n else:\n print 'No array provided, skipping.'", "def _WriteFileEntry(self, file_entry, data_stream_name, destination_file):\n source_file_object = file_entry.GetFileObject(\n data_stream_name=data_stream_name)\n if not source_file_object:\n return\n\n try:\n with open(destination_file, 'wb') as destination_file_object:\n source_file_object.seek(0, os.SEEK_SET)\n\n data = source_file_object.read(self._COPY_BUFFER_SIZE)\n while data:\n destination_file_object.write(data)\n data = source_file_object.read(self._COPY_BUFFER_SIZE)\n\n finally:\n source_file_object.close()", "def newfile(filename):\n # Open the new file for writing\n with open(filename, \"w\") as file:\n pass", "def writeFile(self, f, fname):\n with open(self.PATH + fname, 'wb') as handle:\n pickle.dump(f, handle, protocol = pickle.HIGHEST_PROTOCOL)", "def write(self, file):\n pos = file.tell()\n pickle.dump((self.index, self.meta, self.info), file)\n file.seek(0)\n\n # update the header with the position of the content index.\n file.write(struct.pack('<Q', pos))", "def writefile(path, instream, start=None, end=None, append=False):", "def write_file(self):\n if self._write_file == None:\n return\n\n try:\n out = file(self._write_file, \"w\")\n except IOError, e:\n print e\n sys.exit(1)\n out.writelines(\"A cases\") \n out.close()", "def write_filepath(self, filename, file_format='FASTA', zipout=False):\n \n file_obj = open_with_intermediates(filename,'w')\n if zipout:\n file_obj.close()\n file_obj = StringIO()\n self.write(file_obj, file_format=file_format)\n if zipout:\n import gzip\n file_obj_gz = gzip.open(filename, \"wb\", 6)\n file_obj_gz.write(str.encode(file_obj.getvalue()))\n file_obj_gz.close()\n file_obj.close()", "def write_filepath(self, filename, file_format='FASTA', zipout=False):\n \n file_obj = open_with_intermediates(filename,'w')\n if zipout:\n file_obj.close()\n file_obj = StringIO()\n self.write(file_obj, file_format=file_format)\n if zipout:\n import gzip\n file_obj_gz = gzip.open(filename, \"wb\", 6)\n file_obj_gz.write(str.encode(file_obj.getvalue()))\n file_obj_gz.close()\n file_obj.close()", "def write_raw(content, file_path):\n file = open(file_path, 'wb')\n file.write(content)\n file.close()", "def write_bytes_to_image(self, file_path):\n data_manipulation.bytes_to_image(self.bytes, file_path)", "def write(self, filename, data):\n raise NotImplementedError", "def write_content_file(file_content):\n\n f = open(workingfile, \"w\")\n if hasattr(file_content, \"decode\"):\n f.write(str(file_content.decode(\"utf-8\")))\n else:\n f.write(str(file_content))\n f.close()", "def write_file(self, filehandle, filename):\n filehandle.seek(0)\n backuppath = os.path.join(self.FTP_PATH, filename)\n self.ftp.storbinary('STOR ' + backuppath, filehandle)", "def write(self, filename): # real signature unknown; restored from __doc__\n pass", "def to_file(self, file_io):\n pickle.dump(self.__object, file_io)", "def write_file(file, content):\n with open(file, \"w\") as fid:\n fid.write(content)", "def open(self):\n self.f = open(self.join(self.fname), 'rb')", "def write_binary(self, data, ensure=False):\n if ensure:\n self.dirpath().ensure(dir=1)\n with self.open(\"wb\") as f:\n f.write(data)", "def to_file(self, file_path, smirnoff_data):\n pass", "async def write_file(self, directory: str, name: str, file: bytes):\n pass", "def create_or_update_file(\n file_path: str, file_content: str = \"\", file_content_encoding: str = \"utf-8\"\n) -> None:\n with open(file_path, \"wb+\") as file:\n 
file.write(file_content.encode(file_content_encoding))", "def UpdateFile(self, modID = None):\n if modID is None:\n modID = self.modActive\n\n source = self.modules[modID][1]\n filename = self.modules[modID][2]\n\n try:\n file = open(filename, \"wt\")\n file.write(source)\n finally:\n file.close()", "def write_to_binary_file(self, loc: str, data: bytes):\n try:\n os.mkdir(\"../\" + self.uri)\n except FileExistsError:\n pass\n\n f = open(\"../\" + self.uri + loc, \"wb\")\n f.write(data)\n print(\"[WRITE] written to binary file loc\")\n f.close()", "def beginFileOutput(self):\n self._outputFilepath = self.dataSet[self._outputFileLabel]\n self._outputFile = open(self._outputFilepath, 'w')", "def write(self, instream: typ.BinaryIO, filepath: str,\r\n filename: str = None) -> None:\r\n if filename is not None:\r\n filename = path.basename(filename)\r\n if self.fs_type == 'FAT':\r\n allocator_metadata = self.fs.write(instream, filepath)\r\n self.metadata.add_file(filename, allocator_metadata)\r\n elif self.fs_type == 'NTFS':\r\n allocator_metadata = self.fs.write(instream, filepath)\r\n self.metadata.add_file(filename, allocator_metadata)\r\n else:\r\n raise NotImplementedError()", "def write(self, output: Any) -> None:\n self._original.write(output)\n self._handler.file_write(self._name, output)", "def rewrite_all_file(self, data):\r\n with open(self.file_name, 'w', encoding='utf-8') as self.file:\r\n self.file.write(data)", "def file_write(self, filename, contents, append=True, create=True):\n return self._file_model.file_write(filename, contents, append=append, create=create)", "def _write_file(self, filename, content, mode=None):\n with open(filename, 'w') as fp:\n fp.write(dedent(content).strip())\n fp.write('\\n')\n\n if mode is not None:\n os.chmod(filename, mode)", "def writeFile(self,fileLink,fileBuffer,testChars=''):\n # 026 Unit test should test also urllib file like object aside the real file.\n #self.debug.printHeader() # Too many times -- need to move to debuglevel=4\n filePath=fileLink.replace('http://','')\n [fileDir,fileName]=os.path.split(filePath)\n if not os.path.exists(self.pathStorage.workDir()+os.sep+fileDir): os.makedirs(self.pathStorage.workDir()+os.sep+fileDir)\n localFile=file(self.pathStorage.workDir()+os.sep+fileDir+os.sep+fileName,'wb')\n localFile.write(testChars)\n localFile.write(fileBuffer.read())\n localFile.close()", "def bin_writer(fpath, fname, data):\n path = fpath + fname + '.dat'\n with open(path, 'ab') as file:\n for row in data:\n file.write(row.encode('utf-8'))\n return None", "def encrypt_file(self, input_file_name='', output_file_name=''):\n # Checking input and output file\n assert input_file_name and isfile(input_file_name), \"Input file wasn't selected!\"\n assert output_file_name, \"Output file wasn't selected!\"\n\n # Encrypting file and saving result\n alpha = pow(self.keys['public']['g'], self.keys['session'], self.keys['public']['p'])\n try:\n debug_message('Encrypting...')\n with open(output_file_name, 'w') as f:\n for _byte in self._open_file_binary(input_file_name):\n beta = self.encrypt_byte(_byte)\n f.write(str(alpha) + '\\n')\n f.write(str(beta) + '\\n')\n except Exception:\n debug_message(f\"Error occurred while encrypting file ({Exception})\")\n raise AssertionError(f\"File encrypting error! 
({Exception})\")\n\n return 1", "def _toFile(self):\n pass", "def saveDataToFile(fullFilename, binaryData):\n with open(fullFilename, 'wb') as fp:\n fp.write(binaryData)\n fp.close()\n # logging.debug(\"Complete save file %s\", fullFilename)", "def copy_file(file_name, new_file_name):\n\n import os\n\n if not os.path.exists(file_name):\n raise FileNotFoundError\n\n with open(str(file_name), 'rb') as infile:\n with open(str(new_file_name), 'wb') as outfile:\n while True:\n buff = infile.read(10240)\n if buff:\n outfile.write(buff)\n else:\n break\n\n return", "def update_file(this_file, new_lines):\r\n file_format = get_file_format(this_file)\r\n return new_write_file(this_file, new_lines, file_format=file_format)", "def _amber_write_input_file(self):\n logger.debug(\"Writing {}\".format(self.input))\n with open(os.path.join(self.path, self.input), \"w\") as f:\n f.write(\"{}\\n\".format(self.title))\n f.write(\" &cntrl\\n\")\n self._write_dict_to_mdin(f, self.cntrl)\n\n if self.ewald is not None:\n f.write(\" &ewald\\n\")\n self._write_dict_to_mdin(f, self.ewald)\n\n if self.cntrl[\"nmropt\"] == 1:\n if self.wt is not None:\n for line in self.wt:\n f.write(\" \"+line+\"\\n\")\n f.write(\" &wt type = 'END', /\\n\")\n if self.restraint_file is not None:\n f.write(\"DISANG = {}\\n\".format(self.restraint_file))\n f.write(\"LISTOUT = POUT\\n\\n\")\n if self.group is not None:\n f.write(\"{:s}\".format(self.group))", "def testWriteBinaryData(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteBinaryData(b'Binary data')\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = b'Binary data'\n self.assertEqual(output_data, expected_output_data)", "def write(self, filename=None):\n # Take filename and expand tilde.\n if filename is not None:\n self.filename = filename\n assert self.filename\n filename = os.path.expanduser(self.filename)\n\n # Write it.\n with codecs.open(filename, 'w', self.encoding) as f:\n f.write(self.buffer.text)\n\n self._file_content = self.buffer.text", "def copy_file(file, destination):\n with open(file, 'rb') as infile, open(destination, 'wb') as outfile:\n outfile.write(infile.read())", "def write(self, content, mode='wb'):\r\n self.localpath.write(content, mode)", "def make_file(self):\n\n f = open(get_output_path(), \"w\")\n \n f.write(self.export())\n \n f.close()\n\n return self", "def write(self, filename, data):\n owner_rw = 0600\n fd = os.open(filename, os.O_WRONLY | os.O_CREAT, owner_rw)\n # In case file existed already with wrong permissions, fix them.\n os.chmod(filename, owner_rw)\n os.write(fd, data)\n os.close(fd)", "def _write_output_file(output: str, file_name: str):\n\tfile1 = open(file_name, 'w')\n\tfile1.write(output)\n\tfile1.close()", "def test_output_file_to_input_file() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n output_file_location = os.path.join(tmpdirname, \"foo.txt\")\n\n # Create an output file instance\n output_file = PyArrowFileIO().new_output(location=f\"{output_file_location}\")\n\n # Create the output file and write to it\n with output_file.create() as output_stream:\n output_stream.write(b\"foo\")\n\n # Convert to an input file and confirm the contents\n input_file = output_file.to_input_file()\n with input_file.open() as f:\n assert f.read() == b\"foo\"", "def _targetFile(self):\n basename = os.path.basename(self.src)\n filename = os.path.join(self.target_dir, basename)\n return open(filename, 'w')", "def 
__init__(self, File):\n StringOutputStreamBase.__init__(self)\n self.file = open(File, 'w')", "def finish(self):\n self._fout.add_size()\n self._fout.add_padding()\n self._fout.close()", "def write(self, filename):\n bvh_string = self.generate_bvh_string()\n if filename[-4:] == '.bvh':\n filename = filename\n else:\n filename = filename + '.bvh'\n with open(filename, 'w') as outfile:\n outfile.write(bvh_string)", "def writeFile(file_name, file_text, mode='w+'):\n with open(file_name, mode) as file:\n file.write(file_text)", "def case_convert_file_to_file(source_path: str, dest_path: str, style: CaseStyleEnum) -> None:\n with open(source_path, 'r') as f:\n contents = f.read()\n new_contents = case_convert_stream(contents, style)\n with open(dest_path, 'w') as f:\n f.write(new_contents)", "def write(self, filename, data, hdr):\n pass", "def copyfile(self, source, outputfile):\n shutil.copyfileobj(source, outputfile)", "def copyfile(self, source, outputfile):\n shutil.copyfileobj(source, outputfile)", "def copyfile(self, source, outputfile):\n shutil.copyfileobj(source, outputfile)", "def write_file(filename):\r\n if Py3:\r\n return open(filename, \"w\", newline='')\r\n return open(filename, \"wb\")", "def write_file(self, i, path, fout):\n\n test_file = path + '/' + self.output[i]\n # Write file name\n print(test_file, file=fout, end='\\n\\n')\n\n extension = os.path.splitext(test_file)[1]\n if extension == '.fits' or extension == 'FITS':\n import subprocess\n prog = self.bindir + '/fits2ascii.py -i ' + test_file\n output = subprocess.check_output(prog.split(), shell=False)\n data = output.decode()\n else:\n fin = open(test_file, 'r')\n data = fin.read()\n fin.close()\n #fout.write(data)\n print(data, file=fout)\n print(file=fout, end='\\n')", "def reopen(self):\n self.close()\n self._fileobj = os.fdopen(os.open(str(self.path), os.O_CREAT | os.O_RDWR, 384), \"r+b\", 0)", "def compressFile(source, target):\n data = cake.filesys.readFile(source)\n try:\n data = zlib.compress(data, 1)\n except zlib.error, e:\n raise EnvironmentError(str(e))\n cake.filesys.writeFile(target, data)", "def write_file(content, file_path, mode='w', encoding='utf-8'):\n with codecs.open(file_path, mode, encoding=encoding) as fid:\n fid.write(content)", "def newfile(self) :\n\n\t\tfrom tempfile import mkstemp\n\t\timport os\n\t\tglobal configurer\n\n\t\tfd,name = mkstemp(suffix='.blend')\n\t\tos.close(fd)\n\t\tself.name = name\n\t\tfd = open(name,'wb', configurer.get('ServerBufferSize'))\n\t\tself.fd = fd\n\t\tprint name\n\t\treturn 1", "def overwrite_file(self):\n\n new_file = open(self.temp_filename, 'r')\n file = open(self.filename, 'w')\n file.writelines(new_file.readlines())\n new_file.close()\n file.close()\n os.remove(self.temp_filename)" ]
[ "0.6816833", "0.6505264", "0.637456", "0.6351842", "0.6127052", "0.6121467", "0.6088371", "0.60827595", "0.60820264", "0.6010023", "0.59770185", "0.59106725", "0.59103024", "0.59103024", "0.5909376", "0.5897663", "0.5867396", "0.5844806", "0.58442897", "0.5841373", "0.58408546", "0.5836664", "0.58235", "0.5810557", "0.580361", "0.577133", "0.5753825", "0.57383037", "0.57359964", "0.57335097", "0.5712313", "0.5711228", "0.5696013", "0.56861615", "0.5676979", "0.5671198", "0.5658546", "0.56573284", "0.5654401", "0.5641224", "0.5634643", "0.5634038", "0.563327", "0.5623686", "0.5596648", "0.55944496", "0.55944496", "0.5590027", "0.55783355", "0.5572529", "0.5567841", "0.556278", "0.5560704", "0.5551681", "0.5550034", "0.55445457", "0.55413246", "0.5538514", "0.5537626", "0.55283654", "0.55255955", "0.5518655", "0.54928076", "0.54896843", "0.5486664", "0.54795104", "0.54785186", "0.5476008", "0.54734045", "0.54557323", "0.5453542", "0.5449621", "0.54314023", "0.5428491", "0.54281485", "0.5428033", "0.54279745", "0.5421886", "0.54172796", "0.54049546", "0.5400062", "0.53954065", "0.5392843", "0.53902954", "0.53889835", "0.5388829", "0.53839386", "0.53827155", "0.53801215", "0.53746146", "0.53720546", "0.5370094", "0.5370094", "0.5370094", "0.53620905", "0.535699", "0.535656", "0.535515", "0.5353713", "0.53462005", "0.53353244" ]
0.0
-1
Calculates % of alphanumeric characters in string.
def _alnum_percent(line):
    total = len(line)
    test_set = set()
    for letter in string.ascii_letters:
        test_set.add(letter)
    test_set.add(' ')
    # Return a failure (no good characters) if there are no characters
    if total < 1:
        return 0
    alnum_count = 0
    star_count = 0
    bar_count = 0
    for letter in line:
        # if letter.isalnum():
        if letter in test_set:
            alnum_count += 1
        if letter == '*':
            star_count += 1
        if letter == 'I' or letter == 'i' or letter == 'l' or letter == '|':
            bar_count += 1
    # TODO(searow): properly implement this, but sticking this here for now.
    if star_count / total > 0.1:
        return 0
    if bar_count / total > 0.5:
        return 0
    return alnum_count / total
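A brief sketch exercising the scoring function above; the example strings are made up, and `string` must be imported by the enclosing module:

    import string  # _alnum_percent iterates over string.ascii_letters

    print(_alnum_percent('123 Main Street'))  # letters and spaces dominate -> 0.8
    print(_alnum_percent('****////||||'))     # '*' ratio over 0.1 -> early return 0
    print(_alnum_percent(''))                 # empty line -> 0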
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def letter_percent(s):\r\n\r\n alpha = 'abcdefghijklmnopqrstuvwxyz'\r\n s_lower = s.lower()\r\n s_length = 0\r\n letter_count = {} # empty dictionary\r\n keys = letter_count.keys()\r\n\r\n for char in s_lower:\r\n if char in alpha:\r\n s_length = s_length + 1\r\n if char in letter_count:\r\n letter_count[char] = letter_count[char] + 1\r\n else:\r\n letter_count[char] = 1\r\n\r\n for char in sorted(keys):\r\n letter_count[char] = (letter_count[char] / s_length) * 100\r\n print(char, \"{:.1f}%\".format(letter_count[char]))", "def percent_without_letter(l):\n\treturn len(words_without_letter(l)) / len(word_set)", "def escPercent(text):\n pat = re.compile(r'%(?!\\()')\n return pat.sub('%%', text)", "def token_percentage(word, text):\n word_count = text.count(word)\n text_len = len(text)\n return percentage(word_count, text_len)", "def count_string_indiv(num, num_patients):\n output = \"%.0f/\" % num\n output += str(num_patients)\n if num_patients is not 0:\n percentage = (num / num_patients) * 100\n else:\n percentage = 0.0\n output += ' (%.1f%%)' % percentage\n return output", "def analyze_text(text):\n # Your code here\n total=0\n total_es = 0\n\n for c in text:\n if c.isalpha():\n total += 1\n\n total_es = text.lower().count('e')\n percentage = float(total_es/total)\n\n print(total, total_es, percentage * 100)", "def alphanum_score(words):\n\n\t# Add your code here\n\treturn", "def percent_of(part, whole):\n return part * 100 / whole", "def frequencyLetterDic(s):\n pass", "def convert_percent_str(x):\n if x:\n return float(str(x).strip(\"% \"))\n return 0", "def calc_percent(byte_counter, data_len):\n if data_len is None or not data_len:\n # case where length is not present in metadata or zero\n return '---.-%'\n return '%6s' % ('%3.1f%%'\n % (float(byte_counter) / float(data_len) * 100.0))", "def myHash(string, base=91, mod=1000000321):\n value = 0\n for pos, elem in enumerate(string[::-1]): # считаем значение полинома\n value += ord(elem) * base**pos # в последней задаче сделано с помощью массива (динамика)\n return value % mod", "def percent_encode(src):\n\tdst = ''\n\treserved = re.compile('[A-Za-z0-9-._~]')\n\tfor char in src:\n\t\tif reserved.search(char):\n\t\t\tdst += char\n\t\telse:\n\t\t\t# Percent encode needed chars\n\t\t\t# Convert each byte to hex\n\t\t\tfor byte in list(char.encode('UTF-8')):\n\t\t\t\tif reserved.search(chr(byte)):\n\t\t\t\t\tdst += chr(byte)\n\t\t\t\telse:\n\t\t\t\t\tdst += '%'+hex(byte)[2:].upper()\n\treturn dst", "def at_content(seq):\n result = float(str(seq).count('A') + str(seq).count('T'))/len(seq) *100\n return result", "def getGCpercentage(DNA):\n dnaLength = len(DNA) #counts the length of the DNA string\n findG = DNA.count(\"G\") #finds the letter G in DNA string\n findC = DNA.count(\"C\") #finds the letter C in DNA string\n print(findG)\n print(findC)\n print(dnaLength)\n GCpercent = ((findC + findG)/dnaLength) * 100 #calculates percentage of Gs and Cs\n print(\"Percentage of G and C:\",\" %6.2f\" % GCpercent)\n \n return getGCpercentage", "def percent_str(part, total):\n return str(round(100 * float(part) / float(total), 2)) + '%'", "def format_percentage(num):\n return \"{}%\".format(num)", "def customHashFunc(str):\n return sum(ord(chr) for chr in str)%128", "def gc_rate(dna: str, percent=False):\n c = Counter(dna)\n result = (c[\"G\"] + c[\"C\"]) / len(dna)\n return result * 100 if percent else result", "def _get_accuracy(text):\n sta_obj = [m.start() for m in re.finditer('%',text)]\n return([float(text[x-3:x:1]) for x in sta_obj])", "def 
readable_percent(value, d):\n return \"%s %%\" % (str(round(100.0*float(value), int(d))))", "def random_characters(alpha, numeric_percent_chance=20):\n\n random.seed()\n string_length = len(alpha)\n alphanumeric = ''\n\n for i in range(0, string_length):\n check_int = random.randrange(1, 100)\n\n if check_int <= numeric_percent_chance:\n alphanumeric += str(alpha_to_leet(alpha[i]))\n else:\n alphanumeric += alpha[i]\n\n return alphanumeric", "def letter_freq( text ):\n\tchars = string.ascii_uppercase\n\ttext = text.upper()\n\tresult = get_letter_dict()\n\ttotal = 0\n\tfor char in chars:\n\t\tcount = text.count(char)\n\t\tresult[char] = count\n\t\ttotal += count\n\tif total != 0:\n\t\tfor char in chars:\n\t\t\tresult[char] = (result[char]*10000 / total) / float(100)\n\treturn result", "def get_freq(string:str) -> float:\n import numpy\n try:\n freq = float(string.replace(\"%\", \"\")) / 100\n except AttributeError as e:\n # if string is np.nan\n freq = numpy.nan\n return freq", "def h_ascii(key, N):\n if type(key) == str:\n if type(N) == int:\n s = 0\n for i in range(len(key)):\n s += ord(key[i])\n return s % N\n else:\n raise ValueError\n else:\n raise ValueError", "def percentCommand(self):\n if self.digits[\"text\"] == '0':\n return\n else:\n number = float(self.digits[\"text\"])\n number /= 100\n self.digits[\"text\"] = str(number)\n return self.digits[\"text\"]", "def per(a):\n return a * 100", "def alpha_percent_normalize(perc):\n\n alpha_float = clamp(float(perc.strip('%')), 0.0, 100.0) / 100.0\n alpha_dec = fmt_float(alpha_float, 3)\n alpha = \"%02X\" % round_int(alpha_float * 255.0)\n return alpha, alpha_dec", "def percent(value):\n return f\"{value:,.2f} %\"", "def add_percentage(grade):\n\tif type(grade) == float:\n\t\tperc_grade = str(grade) + '%'\n\t\treturn perc_grade\n\telse:\n\t\treturn grade", "def represent_percent(self, dlpkgs, numpkgs, length):\n if dlpkgs == 0:\n return '{0:^{1}s}'.format('-', length)\n else:\n return '{0:^{1}s}'.format(self.pct(dlpkgs, numpkgs), length)", "def calculate_ac(str1, str2):\n\n total_letters = len(str1)\n ocr_letters = len(str2)\n if total_letters == 0 and ocr_letters == 0:\n acc_by_char = 1.0\n return acc_by_char\n diff = difflib.SequenceMatcher(None, str1, str2)\n correct_letters = 0\n for block in diff.get_matching_blocks():\n correct_letters = correct_letters + block[2]\n if ocr_letters == 0:\n acc_by_char = 0\n elif correct_letters == 0:\n acc_by_char = 0\n else:\n acc_1 = correct_letters / total_letters\n acc_2 = correct_letters / ocr_letters\n acc_by_char = 2 * (acc_1 * acc_2) / (acc_1 + acc_2)\n\n return float(acc_by_char)", "def percentage(part, whole):\n return round((100 * float(part)/float(whole)),2)", "def ghchance_plain(x: int) -> str:\n assert x % 100 == 0\n return '%d%' % (x // 100)", "def have_mod_symbol(l):\r\n if \"%\" in str(l):\r\n return 1\r\n else:\r\n return 0", "def percentify(n):\n if n == 0:\n return n\n return '{:.2f}{}'.format(n*100, '%')", "def value_to_percent(value):\n return ...", "def base26(w):\n val = 0\n for ch in w.lower():\n next_digit = ord(ch) - ord('a')\n val = 26*val + next_digit\n return val", "def score(str_bytes):\n freq_score = sum([character_frequencies.get(chr(letter).lower(), -100) for letter in str_bytes])\n return math.ceil(freq_score * 100) / 100", "def one_pass(self, s: str) -> str:\n alpha_map = {\n '1': 'a', '2': 'b', '3': 'c', '4': 'd', '5': 'e', '6': 'f', '7': 'g',\n '8': 'h', '9': 'i', '10': 'j', '11': 'k', '12': 'l', '13': 'm', '14': 'n',\n '15': 'o', '16': 'p', '17': 'q', '18': 
'r', '19': 's', '20': 't',\n '21': 'u',\n '22': 'v', '23': 'w', '24': 'x', '25': 'y', '26': 'z'\n }\n\n i, res = 0, ''\n while i < len(s):\n if i + 2 < len(s) and s[i + 2] == '#':\n res += alpha_map[s[i:i + 2]]\n i += 3\n else:\n res += alpha_map[s[i]]\n i += 1\n return res", "def escPercent(self) :\n if self.minfile[self.pos : self.pos+7] == r\"-12345X\" :\n #self.logdebug(\"Generic ESCAPE sequence at %08x\" % self.pos)\n self.pos += 7\n buffer = []\n quotes = 0\n char = chr(self.readByte())\n while ((char < ASCIILIMIT) or (quotes % 2)) and (char not in (FORMFEED, ESCAPE, NUL)) : \n buffer.append(char)\n if char == '\"' :\n quotes += 1\n char = chr(self.readByte())\n self.setPageDict(\"escaped\", \"\".join(buffer))\n #self.logdebug(\"ESCAPED : %s\" % \"\".join(buffer))\n self.pos -= 1 # Adjust position\n else : \n while 1 :\n (value, end) = self.getInteger()\n if end == 'B' :\n self.enterHPGL2()\n while self.minfile[self.pos] != ESCAPE :\n self.pos += 1\n self.pos -= 1 \n return \n elif end == 'A' : \n self.exitHPGL2()\n return\n elif end is None : \n return", "def compute_probabilities(text, X=alph):\n\n # Convert to lowercase (just to be sure)\n text = text.lower()\n\n # Make empty dictionary with letters as keys\n counts = {k: 0 for k in X}\n\n # Keep track of total length of legitimate characters\n total = 0\n\n # Loop through text and update counts only for alphabet\n for c in text:\n if c in X:\n total += 1\n counts[c] += 1\n\n # Normalise the counts and return\n return {k: c / total for k, c in counts.items()}", "def gc_content(self, letters='CGS'):\n if len(self) == 0:\n denom = 1.\n else:\n denom = float(self._N)\n letters = [x.upper() for x in letters] + [x.lower() for x in letters]\n letters = list(set(letters))\n counter = sum(self._data.count(x) for x in letters)\n return 100. * counter / denom", "def english_probability(s: str) -> float:\n\n # Use cosine similarity to determine how much the string resembles\n # the letter distribution of English.\n\n # But first, if there are non-ascii characters in the result string, it is\n # certainly not English, so we can stop early\n for c in s:\n if c not in string.printable:\n return 0\n\n # Now, begin by making `s` into a vector of fractions, representing how\n # often each character appears. Must have the same character set as\n # CHAR_FREQ for the math to work correctly, so throw out other characters.\n cntr = Counter(c.lower() for c in s if c in CHAR_FREQ.keys())\n total_chars = sum(cntr.values())\n vec = {c: freq/total_chars for c, freq in cntr.items()}\n\n # Do the actual calculation. 
`vec` is 'a' and `CHAR_FREQ` is 'b'\n a_dot_b = sum(pair[0] * pair[1] for pair in zip_dict(vec, CHAR_FREQ))\n mag_a = sqrt(sum(freq**2 for freq in vec.values()))\n mag_b = sqrt(sum(freq**2 for freq in CHAR_FREQ.values()))\n\n return a_dot_b / (mag_a * mag_b)", "def perc_str_to_int(string: str) -> int:\n match = re.search(r\"\\((\\d+)%\\)$\", string)\n if match:\n return int(match.group(1))\n raise ValueError(\"Cannot find percentage in table\")", "def percent(num):\n return round(num * 100, 1)", "def percent_frequencies(self):\n word_count = 0\n local = self.frequencies()\n for key in local.keys():\n i = local[key]\n word_count += int(i)\n for key in local.keys():\n i = local[key]\n percentage = float(i) / float(word_count)\n local[key] = percentage\n return local", "def format_score(att, hts):\n return str(att) + \"/\" + str(hts)", "def hash_function(self, x):\n if not x:\n return -1\n hashed_value = 0\n\n for char in x:\n hashed_value = 181 * hashed_value + ord(char)\n\n return hashed_value % self.capacity", "def letter_text_analyzer(text, letter):\n count = 0\n text_mod = \"\"\n for char in text:\n if char not in string.punctuation:\n text_mod += char\n text_split = text_mod.split()\n\n for word in text_split:\n if letter in word:\n count += 1\n result = \"Your text contains {0} words,of which {1} ({2:.1%}) contain an '{3}'\"\n\n return result.format(len(text_split), count, count / len(text_split), letter)", "def escape_like(string, escape_char='*'):\n return (\n string\n .replace(escape_char, escape_char * 2)\n .replace('%', escape_char + '%')\n .replace('_', escape_char + '_')\n )", "def ghchance(x: int) -> str:\n assert x % 100 == 0\n return '%d%%' % (x // 100)", "def alpha_score(upper_letters):\r\n return sum(map(lambda l: 1 + ord(l) - ord('A'), upper_letters))", "def getPercent(*args):", "def getPercent(*args):", "def alphanum_score(words):\n #Our score variable.\n\tscore = 0\n\n #Iterating over the list\n for i in words:\n if i.isalpha(): #A method to check if the word contains only alphabets.\n score += 1\n elif i.isdigit():#A method to check if the word contains only numbers.\n score -= 1 \n \n\treturn score", "def urlencodeall(str):\n if not str:\n return \"\"\n\n return string.join(['%' + s.encode('hex') for s in str], '')", "def safe_modulo(s, meta, checked=\"\", print_warning=True, stacklevel=2):\n try:\n return s % meta\n except (ValueError, TypeError, KeyError):\n # replace the missing fields by %%\n keys = substitution_pattern.finditer(s)\n for m in keys:\n key = m.group(\"key\")\n if not isinstance(meta, dict) or key not in meta:\n if print_warning:\n warn(\n \"%r is not a valid key!\" % key,\n SyntaxWarning,\n stacklevel,\n )\n full = m.group()\n s = s.replace(full, \"%\" + full)\n if \"KEY\" not in checked:\n return safe_modulo(\n s,\n meta,\n checked=checked + \"KEY\",\n print_warning=print_warning,\n stacklevel=stacklevel,\n )\n if not isinstance(meta, dict) or \"VALUE\" in checked:\n raise\n s = re.sub(\n r\"\"\"(?<!%)(%%)*%(?!%) # uneven number of %\n \\s*(\\w|$) # format strings\"\"\",\n \"%\\g<0>\",\n s,\n flags=re.VERBOSE,\n )\n return safe_modulo(\n s,\n meta,\n checked=checked + \"VALUE\",\n print_warning=print_warning,\n stacklevel=stacklevel,\n )", "def count_string(counts_series, num_patients):\n output = \"\"\n for label in counts_series.keys():\n output += label + \" = %.0f\" % counts_series[label]\n percent = (counts_series[label] / num_patients) * 100\n output += ' (%.1f%%)\\n' % percent\n return output[:-1] # take off the final \\n", "def 
percent_parse(pstring):\n if pstring.strip().endswith('%'):\n return int(pstring.strip()[:-1]) / 100\n else:\n return np.nan", "def num_alphabet(first_val: str, second_val: str):\n el_1 = ord(first_val) - ord('a') + 1\n el_2 = ord(second_val) - ord('a') + 1\n distance = abs(el_2 - el_1 - 1)\n return f'Позиции букв: {el_1} и {el_2}. Между буквами символов: {distance} '", "def clean(key):\n\treturn key.strip().replace('%','').replace(' ', '-')", "def safe_modulo(s, meta, checked='', print_warning=True, stacklevel=2):\n try:\n return s % meta\n except (ValueError, TypeError, KeyError):\n # replace the missing fields by %%\n keys = substitution_pattern.finditer(s)\n for m in keys:\n key = m.group('key')\n if not isinstance(meta, dict) or key not in meta:\n if print_warning:\n warn(\"%r is not a valid key!\" % key, SyntaxWarning,\n stacklevel)\n full = m.group()\n s = s.replace(full, '%' + full)\n if 'KEY' not in checked:\n return safe_modulo(s, meta, checked=checked + 'KEY',\n print_warning=print_warning,\n stacklevel=stacklevel)\n if not isinstance(meta, dict) or 'VALUE' in checked:\n raise\n s = re.sub(r\"\"\"(?<!%)(%%)*%(?!%) # uneven number of %\n \\s*(\\w|$) # format strings\"\"\", r'%\\g<0>', s,\n flags=re.VERBOSE)\n return safe_modulo(s, meta, checked=checked + 'VALUE',\n print_warning=print_warning, stacklevel=stacklevel)", "def gc_content(seq):\n result = float(str(seq).count('G') + str(seq).count('C'))/len(seq) *100\n return result", "def calculate_weighted_hash(cls, word):\n\n hash_value = 0\n for char in word:\n hash_value += cls.alpha_lookup[char.lower()]\n return hash_value", "def pitch_name_to_base40(name):\n validate_pitch_name(name)\n base40 = letter_to_base40(name[0])\n base40 += name.count('#') - name.count('b')\n base40 += int(name.replace('#', '').replace('b', '')[1:]) * 40\n return base40", "def alpha_score(upper_letters):\r\n return sum(map(lambda l: 1 + ord(l) - ord('A'), upper_letters))", "def letterFreq(words):\n dict = {}\n total = 0\n for word in words:#Iterate through words\n for letter in word:#Increment by letter\n count = 0\n for yearCount in words[word]:\n count += yearCount.count#Increment total instances of word\n total += count#Count total letters\n if letter in dict:\n dict[letter] += count#Add to existing entry\n else:\n dict[letter] = count#Create new entry\n \"\"\"CODE FOR THE WHOLE ALPHABET\"\"\"\n list = []\n for letter in ascii_lowercase:\n if letter in dict and dict[letter] != 0:\n list.append(dict[letter] / total)#Convert to relative\n else:\n list.append(0.0)#Fill alphabet\n return list", "def s_words(words):\n\t\n\treturn words // 100 / 10", "def percent_decode(value):\n return unquote_plus(unicode_to_utf8(value))", "def percentage(context, num, total_num):\n\n p = float(num)/float(total_num) * 100\n percent = str(p) + \"%\"\n return percent", "def percent_identity(align_1, align_2):\n matches = 0\n for i in range(len(align_1)):\n if align_1[i] == align_2[i]:\n matches+= 1\n percent_identity = matches / len(align_1)\n return percent_identity", "def getWordScore(word, n):\n score=0\n for i in range(len(word)):\n addition=SCRABBLE_LETTER_VALUES[word[i]]\n score+=addition*(len(word))\n if len(word)==n:\n score+=50\n return score", "def checksum(value: str) -> str:\n return chr(65 + sum(CHECKSUM_TABLE[index % 2][ALPHANUMERICS_DICT[char]] for index, char in enumerate(value)) % 26)", "def hashstring(astring, tablesize):\n \n sum = 0\n for pos in range(len(astring)):\n # to account for anagrams, we give weightage to positions of the letters to give 
different hash values\n sum = sum + ord(astring[pos]) * (pos + 1)\n \n return sum % tablesize", "def gc_content(sequence):\n gc = sequence.count('G') + sequence.count('C')\n atgc = sequence.count('A') + sequence.count('T') + sequence.count('G') + sequence.count('C')\n \n return (gc/atgc) * 100", "def percentage(a, b):\n return (a * 100.0) / b", "def just(s: str) -> str:\n return s.ljust(50, \"_\")", "def hash_function_1(key: str) -> int:\n hash = 0\n for letter in key:\n hash += ord(letter)\n return hash", "def unit_of_measurement(self):\n return \"%\"", "def letter_prob(c):\n # check to ensure that c is a single character\n assert(type(c) == str and len(c) == 1)\n\n if c == ' ': return 0.1904\n if c == 'e' or c == 'E': return 0.1017\n if c == 't' or c == 'T': return 0.0737\n if c == 'a' or c == 'A': return 0.0661\n if c == 'o' or c == 'O': return 0.0610\n if c == 'i' or c == 'I': return 0.0562\n if c == 'n' or c == 'N': return 0.0557\n if c == 'h' or c == 'H': return 0.0542\n if c == 's' or c == 'S': return 0.0508\n if c == 'r' or c == 'R': return 0.0458\n if c == 'd' or c == 'D': return 0.0369\n if c == 'l' or c == 'L': return 0.0325\n if c == 'u' or c == 'U': return 0.0228\n if c == 'm' or c == 'M': return 0.0205\n if c == 'c' or c == 'C': return 0.0192\n if c == 'w' or c == 'W': return 0.0190\n if c == 'f' or c == 'F': return 0.0175\n if c == 'y' or c == 'Y': return 0.0165\n if c == 'g' or c == 'G': return 0.0161\n if c == 'p' or c == 'P': return 0.0131\n if c == 'b' or c == 'B': return 0.0115\n if c == 'v' or c == 'V': return 0.0088\n if c == 'k' or c == 'K': return 0.0066\n if c == 'x' or c == 'X': return 0.0014\n if c == 'j' or c == 'J': return 0.0008\n if c == 'q' or c == 'Q': return 0.0008\n if c == 'z' or c == 'Z': return 0.0005\n return 1.0", "def alnum(value: str) -> str:\n return \"\".join(filter(__alnum_ascii__.__contains__, value)).lower()", "def unit_of_measurement(self) -> str:\n return \"%\"", "def unit_of_measurement(self) -> str:\n return \"%\"", "def calc_weight(str,dict):\n for i,c in enumerate(str):\n dict[c] += 10**(len(str)-(i+1))", "def hash_string(to_hash):\n\n chars = string.printable\n\n hashed = \"\"\n\n total = 1\n\n counter = 1\n\n for letter in to_hash:\n\n total *= (chars.index(letter) * counter * len(to_hash)*13)\n\n counter += 1\n\n if counter%3 == 0:\n\n total *= total\n\n total = str(total)[:30]\n\n temp_int = \"\"\n\n for i in range(len(total)):\n\n temp_int += total[i]\n\n if i % 2 != 0:\n\n hashed += chars[int(temp_int)]\n\n temp_int = \"\"\n\n return hashed", "def urldecode_plus(s):\n s = s.replace('+', ' ')\n arr = s.split('%')\n res = arr[0]\n for it in arr[1:]:\n if len(it) >= 2:\n res += chr(int(it[:2], 16)) + it[2:]\n elif len(it) == 0:\n res += '%'\n else:\n res += it\n return res", "def scramble(src):\n\n output = \"\"\n\n for each in src.lower():\n diff = ord(each) - ord('a')\n\n if diff >= 0 and diff < 26:\n output += chr(ord('a') + (25 - (ord(each) - ord('a'))))\n elif each >= '0' and each <= '9':\n output += each\n\n return output", "def EscapeWildcards(string: Text) -> Text:\n precondition.AssertType(string, Text)\n return string.replace(\"%\", r\"\\%\").replace(\"_\", r\"\\_\")", "def unquote(s):\n res = s.split('%')\n # fastpath\n if len(res) == 1:\n return s\n s = res[0]\n for item in res[1:]:\n try:\n s += _hextochr[item[:2]] + item[2:]\n except KeyError:\n s += '%' + item\n except UnicodeDecodeError:\n s += unichr(int(item[:2], 16)) + item[2:]\n return s", "def calculateValue(name):\n\tscore = 0\n\tfor c in name:\n\t\tscore += 
ord(c) - ord('A') + 1\n\treturn score", "def encode1(s,n):\n r = \"\"\n for l in s:\n l = ord(l) # convert to ascii\n l = l - 97 # 'a' is 97 so we want to reduce so 'a'=0 'b'=1 etc\n l = l + n # add the offset\n l=l%26 # use mod so that we wrap around back to 'a' if we go past 'z'\n l=l+97 # and add back the 97\n r = r + chr(l)\n return r", "def idf(word):\n return math.log10(len(news_content) / document_frequency(word) + 1)# 分母加1,可防止分母为0的情况", "def lexical_density(string):\r\n # YOUR CODE HERE\r\n\r\n from collections import Counter\r\n tokenizedStr = string.strip('.').split(' ')\r\n\r\n c = Counter(tokenize(string.strip('.'), True));\r\n\r\n data = list(c);\r\n data.remove(' ');\r\n\r\n result = len(data)/len(tokenizedStr)\r\n return result", "def letter_prob(c):\n if c == ' ': return 0.1904\n if c == 'e' or c == 'E': return 0.1017\n if c == 't' or c == 'T': return 0.0737\n if c == 'a' or c == 'A': return 0.0661\n if c == 'o' or c == 'O': return 0.0610\n if c == 'i' or c == 'I': return 0.0562\n if c == 'n' or c == 'N': return 0.0557\n if c == 'h' or c == 'H': return 0.0542\n if c == 's' or c == 'S': return 0.0508\n if c == 'r' or c == 'R': return 0.0458\n if c == 'd' or c == 'D': return 0.0369\n if c == 'l' or c == 'L': return 0.0325\n if c == 'u' or c == 'U': return 0.0228\n if c == 'm' or c == 'M': return 0.0205\n if c == 'c' or c == 'C': return 0.0192\n if c == 'w' or c == 'W': return 0.0190\n if c == 'f' or c == 'F': return 0.0175\n if c == 'y' or c == 'Y': return 0.0165\n if c == 'g' or c == 'G': return 0.0161\n if c == 'p' or c == 'P': return 0.0131\n if c == 'b' or c == 'B': return 0.0115\n if c == 'v' or c == 'V': return 0.0088\n if c == 'k' or c == 'K': return 0.0066\n if c == 'x' or c == 'X': return 0.0014\n if c == 'j' or c == 'J': return 0.0008\n if c == 'q' or c == 'Q': return 0.0008\n if c == 'z' or c == 'Z': return 0.0005\n return 1.0", "def clean_pcts(x):\n # if not enough data, will be '-' with investing.com\n if x == '-' or pd.isnull(x):\n return np.nan\n elif x == 'unch':\n return float(0)\n elif type(x) == float:\n return x\n\n new_x = x.replace('+', '')\n new_x = new_x.replace('%', '')\n new_x = float(new_x) / 100\n return new_x", "def custom_hash(str_in):\n \n PRIMES = [\n 2, 3, 5, 7, 11, 13,\n 17, 19, 23, 29, 31,\n 37, 41, 43, 47, 53,\n 59, 61, 67, 71, 73,\n 79, 83, 89, 97, 101\n ]\n\n LOWERCASE_Z_ASCII = ord('z')\n hash_count = 1\n\n for letter in str_in:\n try:\n hash_count *= PRIMES[ord(letter) - LOWERCASE_Z_ASCII]\n except IndexError:\n print(f'list index out of range: {letter} in {str_in}')\n \n return hash_count", "def hash_key(self,key: str) -> int: \n \n total = 0\n prime = 3\n \n for index, char in enumerate(key,start=1):\n \n total += ord(char)*(prime)**index\n \n return total % self.buckets", "def _nth_letter(n):\r\n\treturn string.ascii_lowercase[n % len(string.ascii_lowercase)]", "def alphanumeric(self):\n return self._alphanumeric" ]
[ "0.73372185", "0.6893399", "0.6517647", "0.63861024", "0.61528224", "0.6047942", "0.60439736", "0.5990186", "0.58984005", "0.588371", "0.5831391", "0.58165365", "0.57671547", "0.5763353", "0.57626456", "0.57338387", "0.5726077", "0.5723785", "0.5721825", "0.57174385", "0.5693655", "0.5690244", "0.562634", "0.56194204", "0.5619242", "0.56012535", "0.5545487", "0.554227", "0.5524246", "0.5515458", "0.54468775", "0.5444731", "0.5436202", "0.54349506", "0.5398218", "0.5389614", "0.536821", "0.535775", "0.53526336", "0.53196114", "0.5318026", "0.53061265", "0.5288967", "0.52747315", "0.5271285", "0.5238009", "0.52369237", "0.5235316", "0.5214828", "0.52031326", "0.51851815", "0.518412", "0.5174309", "0.5172709", "0.5172709", "0.51642585", "0.5161798", "0.51318777", "0.51287276", "0.5122448", "0.5120802", "0.51202023", "0.5103503", "0.50996155", "0.5093398", "0.509255", "0.50913936", "0.509059", "0.5090213", "0.508961", "0.50886697", "0.5075452", "0.5063617", "0.50636125", "0.50546813", "0.5048564", "0.50461334", "0.503694", "0.5030563", "0.5030196", "0.50270575", "0.502614", "0.50255215", "0.50255215", "0.5020175", "0.50033396", "0.5000016", "0.49998045", "0.4996248", "0.49930918", "0.49887022", "0.49826705", "0.4979209", "0.49781364", "0.49769905", "0.4975796", "0.49639738", "0.49558845", "0.49547845", "0.49540493" ]
0.75368613
0
Analyzes text lines, in the order read from OCR processing. Populates the MailFields object with information gathered from OCR. Uses information from each of the lines to best figure out who the main addressee is and which box it is trying to reach.
def parse_text_lines(self, text_lines):
    self.__fields = mail_fields.MailFields()
    alphanum_threshold = 0.5
    # Only evaluate lines that are predominantly alphanumeric
    for line in text_lines:
        if _alnum_percent(line) > alphanum_threshold:
            try:
                parsed = usaddress.tag(line)[0]
            except usaddress.RepeatedLabelError as e:
                # If usaddress gets confused, just throw away the answer as if
                # we got nothing for now.
                # TODO(searow): fix this to handle multiple tags and labels.
                parsed = {}
            for tag in parsed:
                self._add_to_fields(tag, parsed[tag])
    return self.__fields
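A hedged sketch of how this method might be driven; the `AddressParser` wrapper and the OCR lines are assumptions for illustration, while `mail_fields` and `usaddress` come from the surrounding project:

    # Hypothetical driver for the method above.
    ocr_lines = [
        'JANE DOE',
        '123 MAIN ST APT 4',
        'SPRINGFIELD IL 62704',
    ]
    parser = AddressParser()  # assumed class exposing parse_text_lines()
    fields = parser.parse_text_lines(ocr_lines)  # returns a populated MailFields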
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process(self) -> None:\n self.parsed = email.message_from_bytes(self.rawmailcontent, policy=email.policy.EmailPolicy()) # type: email.message.EmailMessage\n\n self.subject = self.parsed[\"subject\"]\n\n if self.parsed[\"X-Jicket-Initial-ReplyID\"] is not None and self.parsed[\"X-Jicket-Initial-ReplyID\"] == self.parsed[\"In-Reply-To\"]:\n self.threadstarter = True\n elif self.config.ticketAddress in self.parsed[\"From\"]: # Take more heuristic approach\n self.threadstarter = True\n\n self.rawmailcontent = None # No need to store after processing\n\n self.get_text_bodies(self.parsed)\n self.textfrombodies()", "def parse(self):\n\t\tfor part in self.mail.walk():\n\t\t\tself.process_part(part)", "def process(Email):\n # convert to lower case\n email = Email.read().lower()\n # strip any HTML\n temp = regx.sub(\"<.*?>\", \" \", email)\n # replace numbers for 0-9 with \"number\"\n temp = regx.sub(\"[0-9]+\", \"number\", temp)\n # replace Http adress to \"httpaddr\"\n temp = regx.sub(\"(http|https)://[^\\s]*\", \"httpaddr\", temp)\n # replace email adress with \"emailaddr\"\n temp = regx.sub(\"[^\\s]+@.*?\\s+\", \"emailaddr\", temp)\n # replace currency sign\n temp = regx.sub(\"[$]+\", \"dollar\", temp)\n temp = regx.sub(\"[']\", \" \", temp)\n # ========================== Tokenize Email ===========================\n # temp = regx.sub(\">+|:+|#+|[$]+|[.]+|@+|/+|-+|&+|[*]+|[+]+|=+|[]]+|[?]+|[()]+|[{}]+|,+|[']+|<+|_+|;+|%+\", \"\", temp)\n\n # remove punctuation\n temp = temp.translate(str.maketrans('', '', string.punctuation))\n\n # split the string in list of words\n tokenized_list = temp.split()\n stemmer = PorterStemmer()\n a = []\n vocab = VocabArray.getVocab()\n extracted_features = mat.zeros((1, len(vocab)))\n\n i = 0\n print(\"========================== Processed Email =========================\")\n for w in range(len(tokenized_list)):\n if len(tokenized_list[w]) < 1:\n continue\n\n # stem the word\n word = stemmer.stem(tokenized_list[w])\n print(word, end=\" \")\n if i > 20:\n i = 0\n print(\"\\n\")\n # get index of the word from vocab list\n indices = mat.where(vocab == word)[0]\n i += 1\n if len(indices) == 0:\n continue\n\n a.append(indices)\n extracted_features[:, indices] = 1\n\n word_indices = mat.c_[mat.array(a)]\n print(\"\\n\")\n return word_indices, extracted_features", "def process(self):\n\n linelang = defaultdict(int)\n wordlang = defaultdict(int)\n\n linefont = defaultdict(int)\n wordfont = defaultdict(int)\n\n inputfiles = self.input_files\n for input_file in inputfiles:\n\n alignurl = input_file.url\n pcgts = parse(alignurl, True)\n page = pcgts.get_Page()\n regions = page.get_TextRegion()\n\n for region in regions:\n lines = region.get_TextLine()\n\n for line in lines:\n try:\n llang = line.primaryLanguage\n linelang[llang] += 1\n except TypeError:\n pass\n\n try:\n lfont = line.fontFamily\n linefont[lfont] += 1\n except TypeError:\n pass\n\n words = line.get_Word()\n for word in words:\n try:\n wlang = word.language\n wordlang[wlang] += 1\n except TypeError:\n pass\n\n try:\n wfont = word.get_TextStyle().fontFamily\n wordfont[wfont] += 1\n except TypeError:\n pass\n\n #predominant language\n try:\n lang = max(linelang, key=lambda k: linelang[k])\n except TypeError:\n try:\n lang = max(wordlang, key=lambda k: wordlang[k])\n except TypeError:\n lang = 'German'\n\n #predominant font\n try:\n font = max(linefont, key=lambda k: linefont[k])\n except TypeError:\n try:\n font = max(wordfont, key=lambda k: wordfont[k])\n except TypeError:\n font = 'Antiqua'\n\n\n 
print(lang)\n print(font)", "def fiber_provider_parse(self, txt):\n\n #PLANNED WORK (PW) Notification\n p1 = re.compile(r'^PLANNED WORK \\(PW\\) Notification',re.MULTILINE)\n\n #PW Reference number: PWIC12345\n p2 = re.compile(r'^PW Reference number:\\s(?P<pw_ref>\\w+)',re.MULTILINE)\n \n #Start Date and Time: 2019-Apr-09 06:00 UTC\n p3 = re.compile(r'^Start Date and Time:\\s(?P<start_time>[\\w\\-]+\\s[0-9:]+)\\s(?P<start_tz>[A-Za-z]+)', re.MULTILINE)\n\n #End Date and Time: 2019-Apr-09 10:00 UTC\n p4 = re.compile(r'^End Date and Time:\\s(?P<end_time>[\\w\\-]+\\s[0-9:]+)\\s(?P<end_tz>[A-Za-z]+)', re.MULTILINE)\n \n #Service ID: IC-99999\n p5 = re.compile(r'^Service ID:\\s(?P<service>[\\w\\-]+)',re.MULTILINE)\n\n\n\n if p1.search(txt):\n #Notifcation about new planned work\n self.ticket_type = 'New maintenance'\n \n try:\n self.provider_ref = p2.search(txt).group('pw_ref')\n\n timeformat = '%Y-%b-%d %H:%M' #Date/Time format used by Fiber Provider\n \n local_start_time = datetime.strptime(p3.search(txt).group('start_time'), timeformat)\n start_tz = p3.search(txt).group('start_tz')\n self.start_time = MailParser.convert_time(local_start_time, start_tz)\n\n local_end_time = datetime.strptime(p4.search(txt).group('end_time'), timeformat)\n end_tz = p4.search(txt).group('end_tz')\n self.end_time = MailParser.convert_time(local_end_time, end_tz)\n\n self.service = p5.search(txt).group('service')\n except:\n raise Exception('Cannot parse some properties')\n\n #here would go conditions for cancelled work and modification\n # but without actual email its difficult to create\n # \n return self", "def parseOutText(f):\n\n\n f.seek(0) ### go back to beginning of file (annoying)\n all_text = f.read()\n ### split off metadata\n \n content = re.split(\"X-FileName:.*$\", all_text, flags=re.MULTILINE, maxsplit=1)\n words = \"\"\n if len(content) > 1:\n text_string = content[1]\n\n ## remove mails that are forwarded or to which are responded\n # e.g. 
---------------------- Forwarded\"\n text_string = re.split(\"-*\\sForwarded\", text_string, maxsplit=1)[0]\n\n # -----Original Message-----\n text_string = re.split(\"-*\\Original\\sMessage\", text_string, maxsplit=1)[0]\n\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # To:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n # or\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # to:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n \n text_string = re.split(\"((.*\\n){2})[Tt]o:\\s\", text_string, maxsplit=1)[0]\n\n ### remove punctuation\n # should be autopmatically by scikit learn\n #text_string = text_string.translate(string.maketrans(\"\", \"\"), string.punctuation)\n\n ### project part 2: comment out the line below\n #words = text_string\n\n ### split the text string into individual words, stem each word,\n ### and append the stemmed word to words (make sure there's a single\n ### space between each stemmed word)\n from nltk.stem.snowball import SnowballStemmer\n\n stemmer = SnowballStemmer(\"english\")\n words = [stemmer.stem(word) for word in text_string.split()]\n\n\n\n return \" \".join(words)", "def _parse_contact_information(self):\n left_column = self.content.find(\"div\", class_=\"linkeSpalte40\")\n graubox = left_column.find(\n lambda tag: tag.name == \"div\" and tag[\"class\"] == [\"grauBox\"]\n )\n\n emails_raw = graubox.find_all(\"a\", class_=\"mail\")\n websites_raw = graubox.find_all(\"a\", class_=\"noDecoration\")\n telephone_raw = graubox.find_all(\"span\", class_=\"telefonnummer\")\n address_raw = [\n e.nextSibling for e in graubox.find_all(\"em\") if e.text == \"Anschrift:\"\n ]\n\n address = address_raw[0].li.get_text(\"\\n\") if address_raw else None\n emails = [re.sub(r\"^mailto:\", \"\", e.attrs[\"href\"]) for e in emails_raw]\n phone_numbers = [t.text for t in telephone_raw]\n websites = [w.attrs[\"href\"] for w in websites_raw]\n\n return {\n \"address\": address,\n \"emails\": emails,\n \"phone_numbers\": phone_numbers,\n \"websites\": websites,\n }", "def parse_report_line(self,line):\n\n report = self.new_police_report()\n report['original_text'] = line\n \n #\n # extract month and day\n match_date = REPORT_DATE_REGEXP.search(line)\n assert(match_date)\n start_index=match_date.start('month')\n stop_index=match_date.end('month')\n report['date_month'] = int(line[start_index:stop_index])\n\n start_index=match_date.start('day')\n stop_index=match_date.end('day')\n report['date_day'] = int(line[start_index:stop_index])\n\n my_logger.debug('extracted date (%d/%d)' % (report['date_month'],report['date_day']))\n\n #############################################\n # extract location & scale\n line = line[0:match_date.start('month')-1] # truncate after start of date\n \n #\n # trim off preceding html and trailing comma\n start_index=line.rfind('>')+1\n assert(start_index>0)\n\n stop_index=line.rfind(',',start_index)\n \n if stop_index >= 2:\n #\n # found a comma, \n line = line[start_index:stop_index]\n else:\n #\n # no comma found\n line = line[start_index:]\n my_logger.debug('truncated string: (%s)' % line)\n report['address']=line\n #\n # try to determine which case:\n # a block\n # an exact address\n # an establishment\n # an intersection\n # special cases, like: \"downtown mountain view\"\n # \n\n if (BLOCK_REGEXP.match(line)!=None):\n my_logger.debug('BLOCK detected')\n report['map_scale']=mapscale.BLOCK\n elif 
(INTERSECTION_REGEXP.match(line)!=None):\n my_logger.debug('INTERSECTION detected')\n report['map_scale']=mapscale.INTERSECTION\n elif (EXACT_REGEXP.match(line)!=None):\n my_logger.debug('EXACT detected')\n report['map_scale']=mapscale.EXACT\n else:\n #\n # must be manually assigned\n report['map_scale']=mapscale.OTHER\n\n\n return report", "def uniprot_txt_parser(uniprot_txt_lines):\n uniprot = {}\n entry_line = [i for i,l in enumerate(uniprot_txt_lines) if l[:2]=='ID']\n entry_line.append(len(uniprot_txt_lines))\n begin_end = [(begin,entry_line[i+1]) for i,begin in enumerate(entry_line[:-1])]\n for begin,end in begin_end:\n for line in uniprot_txt_lines[begin:end]:\n line = line.rstrip('\\r\\n')\n line = line.rstrip('.')\n line = line.replace(';',' ')\n words = line.split()\n if words[0] == 'AC':\n acc = words[1]\n uniprot[acc] = {}\n elif words[0] == 'DR' and words[1] =='InterPro':\n if uniprot[acc].has_key('interpro'):\n uniprot[acc]['interpro'].append((words[2],1))\n else:\n uniprot[acc]['interpro'] = [(words[2],1)]\n elif words[0] == 'DR' and words[1] == 'Pfam':\n if uniprot[acc].has_key('pfam'):\n uniprot[acc]['pfam'].append((words[2],int(words[-1])))\n else:\n uniprot[acc]['pfam'] = [(words[2],int(words[-1]))]\n elif words[0] == 'DR' and words[1] == 'SMART':\n if uniprot[acc].has_key('smart'):\n uniprot[acc]['smart'].append((words[2],words[-1]))\n else:\n uniprot[acc]['smart'] = [(words[2],words[-1])]\n elif words[0] == 'DR' and words[1] == 'SUPFAM':\n if uniprot[acc].has_key('supfam'):\n uniprot[acc]['supfam'].append((words[2],words[-1]))\n else:\n uniprot[acc]['supfam'] = [(words[2],words[-1])]\n elif words[0] == 'DR' and words[1] == 'PROSITE':\n if uniprot[acc].has_key('prosite'):\n uniprot[acc]['prosite'].append((words[2],words[-1]))\n else:\n uniprot[acc]['prosite'] = [(words[2],words[-1])]\n # elif words[0] == 'DR' and words[1] =='PDB':\n # w = words[-1].replace('/',' ')\n # w = w.replace('=',' ')\n # w = w.replace('-',' ')\n # w = w.split()\n # w = words[2:-1]+w\n\n # if uniprot[acc].has_key('pdb'):\n # uniprot[acc]['pdb'].append(w)\n # else:\n # uniprot[acc]['pdb'] = [w]\n\n return uniprot", "def process_email(email_contents):\n\n # Load Vocabulary\n vocab_list = get_vocab_list();\n\n # Init return value\n word_indices = [];\n\n # ========================== Preprocess Email ===========================\n\n # Find the Headers ( \\n\\n and remove )\n # Uncomment the following lines if you are working with raw emails with the\n # full headers\n\n # hdrstart = strfind(email_contents, ([char(10) char(10)]));\n # email_contents = email_contents(hdrstart(1):end);\n\n # Lower case\n email_contents = email_contents.lower()\n\n # Strip all HTML\n # Looks for any expression that starts with < and ends with > and replace\n # and does not have any < or > in the tag it with a space\n email_contents = re.sub(\"<[^<>]+>\", \" \", email_contents)\n\n # Handle Numbers\n # Look for one or more characters between 0-9\n email_contents = re.sub(\"[0-9]+\", \"number\", email_contents)\n\n # Handle URLS\n # Look for strings starting with http:// or https://\n email_contents = re.sub(\"(http|https)://[^\\s]*\", \"httpaddr\", email_contents)\n\n # Handle Email Addresses\n # Look for strings with @ in the middle\n email_contents = re.sub(\"[^\\s]+@[^\\s]+\", \"emailaddr\", email_contents)\n\n # Handle $ sign\n email_contents = re.sub(\"[$]+\", \"dollar\", email_contents)\n\n # ========================== Tokenize Email ===========================\n\n # Output the email to screen as well\n 
print(\"\\n==== Processed Email ====\\n\");\n\n # Process file\n l = 0;\n\n # Tokenize and also get rid of any punctuation\n stemmer = PorterStemmer()\n email_contents = re.split(r'[@$/#.-:&\\*\\+=\\[\\]?!(){},\\'\\'\\\">_<;%\\s\\n\\r\\t]+', email_contents)\n for s in email_contents:\n\n # Remove any non alphanumeric characters\n s = re.sub(\"[^a-zA-Z0-9]\", \"\", s)\n\n # Stem the word \n # (the porter_stemmer sometimes has issues, so we use a try catch block)\n #try:\n s = stemmer.stem(s.strip())\n #except:\n # s = \"\"\n # continue\n\n # Skip the word if it is too short\n if len(s) < 1:\n continue\n\n # Look up the word in the dictionary and add to word_indices if\n # found\n # ====================== YOUR CODE HERE ======================\n # Instructions: Fill in this function to add the index of s to\n # word_indices if it is in the vocabulary. At this point\n # of the code, you have a stemmed word from the email in\n # the variable s. You should look up s in the\n # vocabulary list (vocabList). If a match exists, you\n # should add the index of the word to the word_indices\n # vector. Concretely, if s = 'action', then you should\n # look up the vocabulary list to find where in vocabList\n # 'action' appears. For example, if vocabList{18} =\n # 'action', then, you should add 18 to the word_indices \n # vector (e.g., word_indices = [word_indices ; 18]; ).\n # \n # Note: vocabList[idx] returns a the word with index idx in the\n # vocabulary list.\n # \n # Note: You can use s1 == s2 to compare two strings (s1 and\n # s2). It will return True only if the two strings are equivalent.\n #\n\n\n\n # =============================================================\n\n # Print to screen, ensuring that the output lines are not too long\n if (l + len(s)) > 78:\n print()\n l = 0\n print(f\"{s} \", end=\"\")\n l = l + len(s) + 1\n\n # Print footer\n print('\\n\\n=========================')\n return word_indices", "def get_text_lines(instText):\n\n # Find out which part this is\n part = instText.part\n # Get the necessary parameters: lng, ext, dir\n sLng = part.corpus.get_lng_display()\n sDir = part.dir\n sName = instText.fileName\n sFormat = instText.get_format_display()\n # Now try to get the information\n oBack = get_crpp_text(sLng, sDir, sFormat, sName)\n # Prepare what we return\n if oBack == None or oBack['status'] == 'error':\n return None\n else:\n return oBack", "def processEmail(email_contents):\n # % Load Vocabulary\n vocabList = getVocabList()\n\n # % Init return value\n word_indices = []\n\n # % ========================== Preprocess Email ===========================\n # % Find the Headers ( \\n\\n and remove )\n # % Uncomment the following lines if you are working with raw emails with the\n # % full headers\n # %\n # % hdrstart = strfind(email_contents, ([char(10) char(10)]));\n # % email_contents = email_contents(hdrstart(1):end);\n\n # % Lower case\n email_contents = email_contents.lower()\n\n # % Strip all HTML\n # % Looks for any expression that starts with < and ends with > and replace\n # % and does not have any < or > in the tag it with a space\n email_contents = re.sub(r'<[^<>]+>', ' ', email_contents)\n\n # % Handle Numbers\n # % Look for one or more characters between 0-9\n email_contents = re.sub(r'[0-9]+', 'number', email_contents)\n\n # % Handle URLS\n # % Look for strings starting with http:// or https://\n email_contents = re.sub(r'(http|https)://[^\\s]*', 'httpaddr', email_contents)\n\n # % Handle Email Addresses\n # % Look for strings with @ in the middle\n email_contents = 
re.sub(r'[^\\s]+@[^\\s]+', 'emailaddr', email_contents)\n\n # % Handle $ sign\n email_contents = re.sub(r'[$]+', 'dollar ', email_contents)\n\n # Pick words-like strings\n email_contents_list = re.findall(r'[\\w]+', email_contents)\n email_contents = ' '.join(email_contents_list)\n\n # % ========================== Tokenize Email ===========================\n #\n # % Output the email to screen as well\n print('\\n==== Processed Email ====\\n')\n\n # % Tokenize and also get rid of any punctuation\n porter_stemmer = PorterStemmer()\n words = word_tokenize(email_contents)\n email_contents_list = []\n for index, word in enumerate(words):\n stemmed_word = porter_stemmer.stem(word)\n email_contents_list.append(stemmed_word)\n try:\n index = vocabList.index(stemmed_word)\n except ValueError:\n continue\n else:\n word_indices.append(index)\n\n email = ' '.join(email_contents_list)\n print('Email contents:\\n', email)\n return word_indices", "def _parse_line(self, line):\n fields = line.split('|', 4) # stop splitting after fourth | found\n line_info = {'raw_message': line}\n if len(fields) == 5:\n line_info.update(dict(zip(self._fieldnames, fields)))\n return line_info", "def extract_information(preprocessed_sentences):\n parsed = list(map(lambda sentence: nlp(sentence), preprocessed_sentences))\n\n quantities = list(filter(lambda sentence: eh.sentence_has_type(sentence, 'QUANTITY'), parsed))\n dates = list(filter(lambda sentence: eh.sentence_has_type(sentence, 'DATE'), parsed))\n\n hurricane_name = eh.extract_frequent_regex_match(parsed, '[Hh]urricane ([A-Z][a-z]+)').most_common(1)[0][0]\n hurricane_category = eh.extract_frequent_regex_match(parsed, '[Cc]ategory ([0-9]+)').most_common(1)[0][0]\n\n tropical_storm_name = eh.extract_frequent_regex_match(parsed, '[Tt]ropical [Ss]torm ([A-Z][a-z]+)').most_common(1)[0][0]\n formation_date, middle_month = extract_storm_timeline(dates, hurricane_name)\n\n preperation_info = extract_preparation_information(parsed)\n prep_gpes = preperation_info[0].most_common(3)\n\n restore_info = extract_restoration_information(parsed)\n\n landfall_info = extract_landfall_information(parsed)\n\n wind_info = extract_wind_information(quantities)\n rain_info = extract_rain_information(quantities)\n size_info = extract_size_information(parsed)\n\n # formation_info = extract_formation_info(parsed)\n death_info = extract_death_damages_info(parsed)\n\n print(constants.HURRICANE_SENTENCE.format(hurricane_name, middle_month, hurricane_category))\n print(constants.LANDFALL_SENTENCE.format(hurricane_name, landfall_info[2], landfall_info[3], landfall_info[0], landfall_info[1]))\n print(constants.WIND_SENTENCE.format(wind_info[0], wind_info[1], wind_info[2]))\n print(constants.RAIN_SENTENCE.format(hurricane_name, rain_info[1], rain_info[0], rain_info[2]))\n print(constants.FORMATION_SENTENCE.format(formation_date, tropical_storm_name))\n print(constants.PREPARATION_SENTENCE.format(prep_gpes[0][0], prep_gpes[1][0], prep_gpes[2][0], preperation_info[1].\n most_common(1)[0][0]))\n print(constants.SIZE_SENTENCE.format(size_info[0], size_info[1]))", "def _process_text_line(self, line, columns, format, lower_case, num_line,\n fill_missing=0, filter_case=None,\n strict_separator=False):\n if not isinstance(line, list) and not isinstance(\n line, tuple) and not isinstance(line, numpy.ndarray):\n if format != \"tsv\":\n raise Exception(\"unable to process format \" + format)\n line = line.strip(\"\\r\\n \").replace(\"\\n\", \" \")\n line = DatabaseCore2._split_expr.split(line)\n\n if 
filter_case is not None:\n line = [filter_case(s) for s in line]\n\n try:\n if fill_missing > 0:\n m = max(columns.keys())\n if m >= len(line):\n line = copy.copy(line)\n add = 0\n while m >= len(line) and add < fill_missing:\n a, b = columns[len(line)]\n if b is int:\n line.append(\"0\")\n elif b is float:\n line.append(\"0.0\")\n elif b is decimal.Decimal:\n line.append(\"0\")\n elif b is str:\n line.append(\"\")\n else:\n line.append(\"\")\n add += 1\n\n res = {}\n for c, v in columns.items():\n if \"AUTOFILL\" in v:\n res[v[0]] = \"NULL\"\n elif \"AUTOINCREMENT\" in v:\n continue\n else:\n if c >= len(line):\n self.LOG(\n \"(a)line number \",\n num_line,\n \"*unable to process a line columns \",\n c,\n \"#\",\n line,\n \" columns \",\n columns)\n return None\n\n val = line[c]\n if len(v) > 2 and v[2].lower() not in [\n \"primarykey\", \"autofill\"]:\n val = v[2](val)\n\n try:\n if isinstance(v[1], tuple):\n val = v[1][0](val)\n elif v[1] is datetime.datetime:\n if isinstance(val, datetime.datetime):\n pass\n elif isinstance(val, str):\n val = datetime.datetime.parse(val)\n else:\n raise TypeError(\n \"unable to convert %s into datetime\" % str(\n type(val)))\n else:\n val = v[1](val)\n except ValueError: # as e :\n self.LOG(\n \"(b)line number \",\n num_line,\n \"**unable to process a line columns \",\n c,\n \"#\",\n v[0],\n \" type \",\n v[1],\n \" value \",\n repr(\n line[c]))\n return None\n\n if isinstance(val, str):\n val = val.replace(\"'\", \"''\")\n if lower_case:\n val = val.lower()\n res[v[0]] = val\n\n return res\n except Exception:\n self.LOG(\"(c)line number\", num_line,\n \"***unable to process a line columns:\", line)\n return None", "def process_line(line):\n\n name_comp_list = []\n givenname_comp_list = []\n surname_comp_list = []\n geocode_comp_list = []\n locality_comp_list = []\n date1_comp_list = []\n date2_comp_list = []\n\n # Split the line into the basic fields - - - - - - - - - - - - - - - - - - -\n #\n if (config.in_file_type in ['CSV','CSVQ','TAB','TABQ']):\n # Comma or tabulator separated\n try:\n line_list = config.line_parser.parse(line)\n except:\n log_message('CSV line parsing failed with inout: '+line,'err')\n\n if (len(line_list) < config.input_len):\n log_message('Input line does not contain enough fields,' +\\\n 'fill up with empty fields','warn')\n while (len(line_list) < config.input_len):\n line_list.append('')\n\n config.curr_line_list = line_list # Save current line list\n\n # Extract fields into different component lists - - - - - - - - - - - - - -\n #\n if (config.input_component['name'] != []): # Extract name fields\n for i in config.input_component['name']:\n name_comp_list.append(line_list[i])\n\n else: # Extract givenname and surname into separate components - - - - - -\n if (config.input_component['givenname'] != []): # Extract g-name fields\n for i in config.input_component['givenname']:\n givenname_comp_list.append(line_list[i])\n\n if (config.input_component['surname'] != []): # Extract surname fields\n for i in config.input_component['surname']:\n surname_comp_list.append(line_list[i])\n\n if (config.input_component['geocode'] != []): # Extract geocode fields\n for i in config.input_component['geocode']:\n geocode_comp_list.append(line_list[i])\n\n if (config.input_component['locality'] != []): # Extract locality fields\n for i in config.input_component['locality']:\n locality_comp_list.append(line_list[i])\n\n if (config.input_component['date1'] != []): # Extract date1 fields\n for i in config.input_component['date1']:\n 
date1_comp_list.append(line_list[i])\n\n if (config.input_component['date2'] != []): # Extract date2 fields\n for i in config.input_component['date2']:\n date2_comp_list.append(line_list[i])\n\n elif (config.in_file_type == 'COL'): # Column based input file - - - - - - -\n\n if (len(line) < config.input_len):\n log_message('Input line is not long enough, fill up with spaces','warn')\n line += ' '*(config.input_len-len(line))\n\n if (config.input_component['name'] != []): # Extract name fields\n for (col_start,length) in config.input_component['name']:\n name_comp_list.append(line[col_start,col_start+length])\n\n else: # Extract givenname and surname into separate components - - - - - -\n if (config.input_component['givenname'] != []): # Extract g-name fields\n for (col_start,length) in config.input_component['givenname']:\n givenname_comp_list.append(line[col_start,col_start+length])\n\n if (config.input_component['surname'] != []): # Extract surname fields\n for (col_start,length) in config.input_component['surname']:\n surname_comp_list.append(line[col_start,col_start+length])\n\n if (config.input_component['geocode'] != []): # Extract geocode fields\n for (col_start,length) in config.input_component['geocode']:\n geocode_comp_list.append(line[col_start,col_start+length])\n\n if (config.input_component['locality'] != []): # Extract locality fields\n for (col_start,length) in config.input_component['locality']:\n locality_comp_list.append(line[col_start,col_start+length])\n\n if (config.input_component['date1'] != []): # Extract date1 fields\n for (col_start,length) in config.input_component['date1']:\n date1_comp_list.append(line[col_start,col_start+length])\n\n if (config.input_component['date2'] != []): # Extract date2 fields\n for (col_start,length) in config.input_component['date2']:\n date2_comp_list.append(line[col_start,col_start+length])\n\n # elif (config.in_file_type == 'SQL'): # - - - - - - - - - - - - - - - - - -\n\n ################################\n # Add later: SQL database access\n ################################\n\n msg = [' Component basic field lists:', \\\n ' Name: '+str(name_comp_list), \\\n ' Given name: '+str(givenname_comp_list), \\\n ' Surname: '+str(surname_comp_list), \\\n ' Geocode: '+str(geocode_comp_list), \\\n ' Locality: '+str(locality_comp_list), \\\n ' Date1: '+str(date1_comp_list), \\\n ' Date2: '+str(date2_comp_list)]\n log_message(msg,'v2')\n\n name_comp = ''\n givenname_comp = ''\n surname_comp = ''\n geocode_comp = ''\n locality_comp = ''\n date1_comp = ''\n date2_comp = ''\n\n # Now clean and then concatenate component lists into strings - - - - - - - -\n #\n if (name_comp_list != []): # Name component\n name_comp = name_comp_list[0] # Start with first field in list\n\n for f in name_comp_list[1:]: # Loop over following fields (if any)\n if (f != ''):\n if (config.input_space_sep['name'] == 1):\n sep = ' ' # Set separator to space between fields\n else:\n sep = '' # No space between fields\n\n # Check field spilling only if space separator is set to ' ' \n #\n if (sep == ' ') and (config.input_check_spilling['name'] == 1):\n sep = check_field_spill(name_comp, f)\n\n name_comp = name_comp+sep+f # Append separator and field\n\n if (givenname_comp_list != []): # Givenname component - - - - - - - - - - -\n givenname_comp = givenname_comp_list[0] # Start with first field in list\n\n for f in givenname_comp_list[1:]: # Loop over following fields (if any)\n if (f != ''):\n if (config.input_space_sep['givenname'] == 1):\n sep = ' ' # Set separator to 
space between fields\n else:\n sep = '' # No space between fields\n\n # Check field spilling only if space separator is set to ' ' \n #\n if (sep == ' ') and (config.input_check_spilling['givenname'] == 1):\n sep = check_field_spill(givenname_comp, f)\n\n givenname_comp = givenname_comp+sep+f # Append separator and field\n\n if (surname_comp_list != []): # Surname component - - - - - - - - - - - - -\n surname_comp = surname_comp_list[0] # Start with first field in list\n\n for f in surname_comp_list[1:]: # Loop over following fields (if any)\n if (f != ''):\n if (config.input_space_sep['surname'] == 1):\n sep = ' ' # Set separator to space between fields\n else:\n sep = '' # No space between fields\n\n # Check field spilling only if space separator is set to ' ' \n #\n if (sep == ' ') and (config.input_check_spilling['surname'] == 1):\n sep = check_field_spill(surname_comp, f)\n\n surname_comp = surname_comp+sep+f # Append separator and field\n\n if (geocode_comp_list != []): # Geocode component - - - - - - - - - - - - -\n geocode_comp = geocode_comp_list[0] # Start with first field in list\n\n for f in geocode_comp_list[1:]: # Loop over following fields (if any)\n if (f != ''):\n if (config.input_space_sep['geocode'] == 1):\n sep = ' ' # Set separator to space between fields\n else:\n sep = '' # No space between fields\n\n # Check field spilling only if space separator is set to ' ' \n #\n if (sep == ' ') and (config.input_check_spilling['geocode'] == 1):\n sep = check_field_spill(geocode_comp, f)\n\n geocode_comp = geocode_comp+sep+f # Append separator and field\n\n if (locality_comp_list != []): # Locality component - - - - - - - - - - - -\n locality_comp = locality_comp_list[0] # Start with first field in list\n\n for f in locality_comp_list[1:]: # Loop over following fields (if any)\n if (f != ''):\n if (config.input_space_sep['locality'] == 1):\n sep = ' ' # Set separator to space between fields\n else:\n sep = '' # No space between fields\n\n # Check field spilling only if space separator is set to ' ' \n #\n if (sep == ' ') and (config.input_check_spilling['locality'] == 1):\n sep = check_field_spill(locality_comp, f)\n\n locality_comp = locality_comp+sep+f # Append separator and field\n\n if (date1_comp_list != []): # Date1 component - - - - - - - - - - - - - - -\n date1_comp = date1_comp_list[0] # Start with first field in list\n\n for f in date1_comp_list[1:]: # Loop over following fields (if any)\n if (f != ''):\n if (config.input_space_sep['date1'] == 1):\n sep = ' ' # Set separator to space between fields\n else:\n sep = '' # No space between fields\n\n # Check field spilling only if space separator is set to ' ' \n #\n if (sep == ' ') and (config.input_check_spilling['date1'] == 1):\n if (date1_comp[-1] != ' ') and (f[0] != ' '):\n tmp_list0 = date1_comp.split()\n tmp_list1 = f.split()\n check_word = tmp_list0[-1]+tmp_list1[0]\n\n if (check_word in ['jan','feb','mar','apr','may','jun','jul','aug', \\\n 'sep','oct','nov','dec','january','february','march','april', \\\n 'may','june','july','august','september','october','november', \\\n 'december']):\n\n sep = '' # Set separator to no space\n msg = ' Correct date1 word spilling: \"'+date1_comp+'\",\"'+f+'\"'\n log_message(msg,'v1')\n\n date1_comp = date1_comp+sep+f # Append separator and field\n\n if (date2_comp_list != []): # Date2 component - - - - - - - - - - - - - - -\n date2_comp = date2_comp_list[0] # Start with first field in list\n\n for f in date2_comp_list[1:]: # Loop over following fields (if any)\n if (f != ''):\n 
if (config.input_space_sep['date2'] == 1):\n sep = ' ' # Set separator to space between fields\n else:\n sep = '' # No space between fields\n\n # Check field spilling only if space separator is set to ' ' \n #\n if (sep == ' ') and (config.input_check_spilling['date2'] == 1):\n if (date2_comp[-1] != ' ') and (f[0] != ' '):\n tmp_list0 = date1_comp.split()\n tmp_list1 = f.split()\n check_word = tmp_list0[-1]+tmp_list1[0]\n\n if (check_word in ['jan','feb','mar','apr','may','jun','jul','aug', \\\n 'sep','oct','nov','dec','january','february','march','april', \\\n 'may','june','july','august','september','october','november', \\\n 'december']):\n\n sep = '' # Set separator to no space\n msg = ' Correct date1 word spilling: \"'+date1_comp+'\",\"'+f+'\"'\n log_message(msg,'v1')\n\n date2_comp = date2_comp+sep+f # Append separator and field\n\n # Check if name component is given or givenname and surname separately - - -\n #\n if (config.input_component['givenname'] != []) or \\\n (config.input_component['surname'] != []):\n name_comp = [givenname_comp, surname_comp]\n\n msg = [' Components:', \\\n ' Name: \"'+str(name_comp)+'\"', \\\n ' Geocode: \"'+geocode_comp+'\"', \\\n ' Locality: \"'+locality_comp+'\"', \\\n ' Date1: \"'+date1_comp+'\"', \\\n ' Date2: \"'+date2_comp+'\"']\n log_message(msg,'v1')\n\n return [name_comp, geocode_comp, locality_comp, date1_comp, date2_comp]", "def extractLineData(sentEnPath, sentFrPath, sentRefPath, sentAnnotPath,\n enList=[], frList=[], refList=[], annotList=[]):\n # get the sentences and annotations\n with open(sentEnPath) as enFile:\n enList = enList + [s.replace(u'\\n', u'') for s in enFile.readlines()]\n with open(sentFrPath) as frFile:\n frList = frList + [s.replace(u'\\n', u'') for s in frFile.readlines()]\n with open(sentRefPath) as refFile:\n refList = refList + [s.replace(u'\\n', u'') for s in refFile.readlines()]\n with open(sentAnnotPath) as annotFile:\n sentAnnotList = annotFile.readlines()\n dic = {u'0\\n': u'0.0', u'1\\n': u'1.0', u'1.1.0\\n': u'1.1', u'0.1.0\\n': u'0.1'}\n tempList = []\n for annot in sentAnnotList:\n if annot in dic:\n tempList.append(dic[annot])\n else:\n tempList.append(annot.replace(u'\\n', u''))\n annotList = annotList + tempList\n return enList, frList, refList, annotList", "def read_enron_emails(input_file, start_line_number,data):\n\n with open(input_file, 'r') as file:\n lines = file.readlines()\n\n count = 1\n for line in lines:\n print (count, line.strip())\n read_enron_email(line.strip(), start_line_number,data)\n count = count + 1\n\n file.close()", "def extract_text(infile):\n # Get text from mudraw\n text = subprocess.check_output(['mudraw', '-F', 'txt', infile])\n\n # Cleanup raw text\n match = re.search(\n r'.*?Activity \\/ Remarks(?P<table1>.*?)Activities not shown on the ' +\n r'DABS Chart Side:.*?Activity \\/ Remarks(?P<table2>.*?)For detailed ' +\n r'information regarding the DABS',\n text,\n re.MULTILINE | re.DOTALL)\n if not match:\n raise ExtractionError('Could not extract text from PDF.')\n false_or_none_string = lambda x: bool(x) and x.lower() != 'none'\n data = '\\n\\n\\n'.join(match.groups())\n raw_parts = re.sub(r'\\n[ \\t]+\\n', '\\n\\n', data).split('\\n\\n\\n')\n parts = filter(false_or_none_string, map(lambda x: x.strip(), raw_parts))\n\n # Write CSV\n headers = (\n b'Firing-Nr\\nD-/R-Area\\nNOTAM-Nr',\n b'Validity UTC',\n b'Lower Limit\\nAMSL or FL',\n b'Upper Limit\\nAMSL or FL',\n b'Location',\n b'Center Point',\n b'Covering Radius',\n b'Activity / Remarks',\n )\n rows = []\n for i, part in 
enumerate(parts):\n # Regexes\n multiple_newlines_re = re.compile(r'\\n+')\n height_re = re.compile(r'(GND|[0-9]+m \\/ [0-9]+ft|FL[0-9]{2,3}|REF AIP)')\n center_radius_re = re.compile(r'([0-9]{6}N [0-9]{7}E)\\s+?(.*?NM)')\n\n # Separate columns (warning: hackish code ahead!)\n row = {}\n step1 = re.split(r'([0-2][0-9][0-6][0-9] - [0-2][0-9][0-6][0-9])', part)\n row['nr'] = step1[0].strip()\n timestring = '\\n'.join(step1[1:-1])\n row['validity'] = multiple_newlines_re.sub('\\n', timestring)\n step2 = filter(None, height_re.split(step1[-1].strip()))\n row['lower'] = step2[0]\n row['upper'] = step2[2]\n step3 = filter(None, center_radius_re.split(step2[-1].strip()))\n row['location'] = step3[0].strip()\n row['center'] = step3[1].strip()\n row['radius'] = step3[2].strip()\n row['activity'] = multiple_newlines_re.sub('\\n', step3[3].strip())\n\n # Add to list of rows\n rows.append((\n row['nr'].encode('utf8'),\n row['validity'].encode('utf8'),\n row['lower'].encode('utf8'),\n row['upper'].encode('utf8'),\n row['location'].encode('utf8'),\n row['center'].encode('utf8'),\n row['radius'].encode('utf8'),\n row['activity'].encode('utf8'),\n ))\n\n return tablib.Dataset(*rows, headers=headers)", "def parseLine(self, line):\n\n # Bail out on lines with a malformed timestamp\n try:\n timestamp = time.mktime(time.strptime(line[1:25], \"%a %b %d %H:%M:%S %Y\"))\n except:\n return\n \n text = line[27:]\n \n if self.myname: \n self.attendance.mark(timestamp, self.myname)\n text = self.re_myname.sub(self.myname + ' ', text) \n \n damage = self.re_damage.search(text)\n #damage = False\n death = self.re_death.search(text)\n #death = False\n miss = self.re_miss.search(text)\n #miss = False\n #defensive = self.re_defensive.search(text)\n defensive = False\n loot = self.re_loot.search(text)\n attendance = self.re_attendance.search(text)\n if damage:\n (attacker, atktype, defender, amount, nonmelee) = damage.groups()\n if nonmelee:\n atktype = 'non-melee'\n if self.extract and (self.extract == attacker or self.extract == defender):\n self.fights.getFight(timestamp, attacker, defender).addAttack(timestamp, atktype, int(amount))\n if attacker.count(' ') == 0:\n self.attendance.mark(timestamp, attacker)\n if defender.count(' ') == 0:\n self.defender.mark(timestamp, defender)\n elif miss:\n (attacker, atktype, defender) = miss.groups()\n if self.extract and (self.extract == attacker or self.extract == defender):\n self.fights.getFight(timestamp, attacker, defender).addAttack(timestamp, atktype, 'miss')\n if attacker.count(' ') == 0:\n self.attendance.mark(timestamp, attacker)\n if defender.count(' ') == 0:\n self.defender.mark(timestamp, defender)\n elif defensive:\n (attacker, atktype, defender, defensetype) = defensive.groups()\n if self.extract and (self.extract == attacker or self.extract == defender):\n self.fights.getFight(timestamp, attacker, defender).addAttack(timestamp, atktype, defensetype)\n if attacker.count(' ') == 0:\n self.attendance.mark(timestamp, attacker)\n if defender.count(' ') == 0:\n self.defender.mark(timestamp, defender)\n elif death:\n (defender, junk, attacker) = death.groups()\n if junk.count('have slain'):\n (defender, attacker) = (attacker, defender)\n # Use PC deaths to track their attendance\n if defender.count(' ') == 0:\n self.attendance.mark(timestamp, defender)\n elif attacker.count(' ') == 0:\n self.kills.addKill(timestamp, defender)\n if self.extract and (self.extract == attacker or self.extract == defender):\n self.fights.addDeath(timestamp, attacker, defender)\n if 
attacker.count(' ') == 0:\n self.attendance.mark(timestamp, attacker)\n elif loot:\n (looter, item) = loot.groups()\n self.loot.addLoot(timestamp, looter, item)\n self.attendance.mark(timestamp, looter)\n elif attendance:\n attendee = attendance.group(1)\n self.attendance.mark(timestamp, attendee)", "def process_line(self, line):\n find_result = re.findall(LINE_REGEX, line)\n line_data = {r[0]: r[1] for r in find_result}\n self.process_url(line_data.get('request_to'))\n self.process_status_code(line_data.get('response_status'))", "def loadText(self,textFileName):\n #--Text File\n infoKey = None\n text = None\n texts = {}\n reHeader = re.compile('^#')\n reInfo = re.compile('@ +(\\d) +\"(.+?)\" +(\\d+)')\n reSingleQuote = re.compile('[\\x91\\x92]')\n reDoubleQuote = re.compile('[\\x93\\x94]')\n reEllipsis = re.compile('\\x85')\n reEolSpaces = re.compile(r' +\\r\\n')\n reExtraSpaces = re.compile(r' +')\n reIllegalChars = re.compile(r'[@#]')\n #--Read file\n textFile = file(textFileName,'rb')\n for line in textFile:\n if reHeader.match(line): continue\n maInfo = reInfo.match(line)\n if maInfo:\n infoKey = (int(maInfo.group(1)),maInfo.group(2),maInfo.group(3))\n texts[infoKey] = text = []\n else:\n text.append(line)\n textFile.close()\n #--Strip and clean texts\n updated = []\n unmatched = []\n trimmed = {}\n for infoKey in texts.keys():\n if infoKey not in self.infos:\n unmatched.append(infoKey)\n continue\n text = ''.join(texts[infoKey])\n #--Required Subs\n text = text.strip(' \\r\\n')\n text = reSingleQuote.sub('\\'',text)\n text = reDoubleQuote.sub('\"',text)\n text = reEllipsis.sub('...',text)\n text = reIllegalChars.sub('',text)\n #--Optional subs\n text = reEolSpaces.sub('\\r\\n',text)\n text = reExtraSpaces.sub(' ',text)\n #--Trim?\n if len(text) > 511:\n trimmed[infoKey] = (text[:511],text[511:])\n text = text[:511]\n info = self.infos[infoKey]\n if text != info.text:\n info.text = text\n info.setChanged()\n updated.append(infoKey)\n #--Report\n buff = cStringIO.StringIO()\n for header,infoKeys in ((_('Updated'),updated),(_('Unmatched'),unmatched)):\n if infoKeys:\n buff.write('=== %s\\n' % (header,))\n for infoKey in infoKeys:\n buff.write('* %s\\n' % (infoKey,))\n if trimmed:\n buff.write('=== %s\\n' % (_('Trimmed'),))\n for infoKey,(preTrim,postTrim) in trimmed.items():\n buff.write(`infoKey`+'\\n'+preTrim+'<<<'+postTrim+'\\n\\n')\n return buff.getvalue()", "def extract_features(tlc):\n text = clean_text(tlc['body'])\n fields = dict()\n # add features here #\n fields['Top_comment_word_count'] = len(text.split(' '))\n fields['Top_comment_text'] = text\n\n # Extract time-based features\n def get_day_of_week(text):\n return datetime.datetime.strptime(text, '%Y-%m-%d %H:%M:%S').weekday() + 1\n\n def get_day_of_month(text):\n return datetime.datetime.strptime(text, '%Y-%m-%d %H:%M:%S').day\n\n def get_time_of_day(text):\n return datetime.datetime.strptime(text, '%Y-%m-%d %H:%M:%S').hour\n time_local = time.localtime(tlc['created_utc'])\n time_local = time.strftime(\"%Y-%m-%d %H:%M:%S\", time_local)\n fields['Top_comment_day'] = get_day_of_month(time_local)\n fields['Top_comment_day_of_week'] = get_day_of_week(time_local)\n fields['Top_comment_hour'] = get_time_of_day(time_local)\n\n # Extract gender value\n gp = GenderPerformr()\n probs, _ = gp.predict(tlc['author'])\n # Rescale it from [0,1] to [-1,1]\n fields['Top_comment_author_gender_value'] = 2 * probs - 1\n\n # Extract percentage of mispellings\n check = SpellChecker(\"en_US\")\n tokenizer = get_tokenizer(\"en_US\")\n # Prevent 
the denominator from 0\n def weird_division(n, d):\n return n / d if d else 0\n\n def get_mispellings_percentage(text):\n mispelling_count = 0\n total_count = 0\n if text == 'nan':\n return total_count\n else:\n check.set_text(text)\n for err in check:\n mispelling_count = mispelling_count + 1\n for w in tokenizer(text):\n total_count = total_count + 1\n value = weird_division(mispelling_count, total_count)\n return value\n fields['Top_comment_mispellings'] = get_mispellings_percentage(text)\n\n # Get politeness, agreement, support scores, and rescale them from [1,5] to [-1,1]\n ar = Agreementr()\n pr = Politenessr()\n sr = Supportr()\n fields['Top_comment_agreement_value'] = 0.5*float(ar.predict([text]))-1.5\n fields['Top_comment_politeness_value'] = 0.5*float(pr.predict([text]))-1.5\n fields['Top_comment_support_value'] = 0.5*float(sr.predict([text]))-1.5\n\n # Get toxicity scores\n KEY = \"yourkey.txt\" # os.getenv(\"GOOGLE_API_KEY\")\n service = discovery.build('commentanalyzer', 'v1alpha1', developerKey=KEY)\n\n def get_results(request_id, response, exception):\n toxicity_scores.append((request_id, response))\n\n toxicity_scores = []\n count = 0\n batch = service.new_batch_http_request(callback=get_results)\n analyze_request = {\n 'comment': {'text': text},\n \"requestedAttributes\": {\n \"TOXICITY\": {},\n \"SEVERE_TOXICITY\": {},\n \"ATTACK_ON_COMMENTER\": {}\n }\n }\n batch.add(service.comments().analyze(body=analyze_request), request_id=str(count))\n batch.execute()\n toxic_score = toxicity_scores[0][1]['attributeScores']['TOXICITY']['summaryScore']['value']\n attack_score = toxicity_scores[0][1]['attributeScores']['ATTACK_ON_COMMENTER']['summaryScore']['value']\n if toxic_score > 0.5:\n fields['Top_comment_untuned_toxicity'] = 1\n else:\n fields['Top_comment_untuned_toxicity'] = 0\n if toxic_score > 0.8 and attack_score > 0.5:\n fields['Top_comment_tuned_toxicity'] = 1\n else:\n fields['Top_comment_tuned_toxicity'] = 0\n # end of feature extractions #\n return fields", "def _populate(self):\n if not hasattr(self, 'multiline'):\n start = self.start\n end = self.end\n txt = self.filetext\n self.start_line = txt.count('\\n', 0, start) + 1\n self.start_column = start - txt.rfind('\\n', 0, start) - 1\n self.end_line = txt.count('\\n', start, end) + self.start_line\n self.end_column = end - txt.rfind('\\n', 0, end) - 1\n self.multiline = self.start_line != self.end_line", "def processEmail(email_contents):\n # Lower case\n email_contents = email_contents.lower()\n # Strip all HTML\n email_contents = re.sub('<[^<>]+>', ' ', email_contents)\n # Handle Numbers\n email_contents = re.sub('[0-9]+', 'number', email_contents)\n # Handle URLS\n email_contents = re.sub('(http|https)://[^\\s]*', 'httpaddr', email_contents)\n # Handle Email Addresses\n email_contents = re.sub('[^\\s]+@[^\\s]+', 'emailaddr', email_contents)\n # Handle $ sign\n email_contents = re.sub('[$]+', 'dollar', email_contents)\n # Remove any non alphanumeric characters\n email_contents = re.sub('[^a-zA-Z]', ' ', email_contents)\n # Tokenize ane remove single characters\n ps = PorterStemmer()\n email_contents = [ps.stem(token) for token\n in email_contents.split(\" \") if len(token) > 1]\n\n vocabList = getVocabList()\n word_indices = []\n for word in email_contents:\n ind = vocabList[vocabList.vocab == word].index\n if ind.any():\n word_indices.append(ind[0])\n print(word, '\\t', ind[0])\n\n return email_contents, word_indices", "def _parse_records(self, customization=None):\n def _add_parsed_record(record, records):\n 
\"\"\"\n Atomic function to parse a record\n and append the result in records\n \"\"\"\n if record != \"\":\n logger.debug('The record is not empty. Let\\'s parse it.')\n parsed = self._parse_record(record, customization=customization)\n if parsed:\n logger.debug('Store the result of the parsed record')\n records.append(parsed)\n else:\n logger.debug('Nothing returned from the parsed record!')\n else:\n logger.debug('The record is empty')\n\n records = []\n record = \"\"\n # read each line, bundle them up until they form an object, then send for parsing\n for linenumber, line in enumerate(self.bibtex_file_obj):\n logger.debug('Inspect line %s', linenumber)\n if line.strip().startswith('@'):\n # Remove leading whitespaces\n line = line.lstrip()\n logger.debug('Line starts with @')\n # Parse previous record\n _add_parsed_record(record, records)\n # Start new record\n logger.debug('The record is set to empty')\n record = \"\"\n # Keep adding lines to the record\n record += line\n\n # catch any remaining record and send it for parsing\n _add_parsed_record(record, records)\n logger.debug('Set the list of entries')\n self.bib_database.entries = records", "def process_message(mail):\n\tmessage = email.message_from_string(mail)\t#parsing metadata\n\tdatetuple = email.utils.parsedate_tz(message.__getitem__('Date'))\n\tfiledirectory = basedirectory\n\tif not datetuple:\n\t\tdatetuple = email.utils.parsedate_tz(message.__getitem__('Delivery-date'))\n\tif directory_for_year: \n\t\tfiledirectory = os.path.join(filedirectory, str(datetuple[0]))\n\tif directory_for_month:\n\t\tfiledirectory = os.path.join(filedirectory, str(datetuple[1])) \n\tdateposix = email.utils.mktime_tz(datetuple)\n\tlocaldate = datetime.datetime.fromtimestamp(dateposix)\n\tdatestring = localdate.strftime('%Y%m%d-%H%M') # +'-'+'-'.join(time.tzname) #\n\tsender = email.utils.parseaddr(message['To'])[1].replace('@','_').replace('.','-')\n\tsubject = email.header.decode_header(message['Subject'])[0][0]\n\tfilename = datestring + '_' + sender[:60] + '_' + subject[:60]\n\n\t# parsing mail content\n\tmailstring = ''\n\tfor headername, headervalue in message.items():\n\t\tmailstring += headername + ': ' + headervalue + '\\r\\n'\t# add \\r\\n or\n\tif message.get_content_maintype() == 'text':\n\t\tmailstring += message.get_payload(decode=True)\n\n\t# handle multipart: \n\telif message.get_content_maintype() == 'multipart':\n\t\tpartcounter = 0\n\t\tfor part in message.walk():\n\t\t\tif part.get_content_maintype() == 'text':\t# also: text/html\n\t\t\t\tfor header, value in part.items():\n\t\t\t\t\tmailstring += header + ': ' + value + '\\r\\n'\n\t\t\t\t\tmailstring += '\\r\\n' + part.get_payload(decode=True) + '\\r\\n'\n\t\t\t# skip multipart containers\n\t\t\telif part.get_content_maintype() != 'multipart':\n\t\t\t\tpartcounter += 1\n\t\t\t\ttry:\n\t\t\t\t\tattachmentname = email.header.decode_header(part.get_filename())[0][0]\n\t\t\t\texcept:\n\t\t\t\t\tattachmentname = \"\"\n\t\t\t\t\tprint(\"Error when parsing filename.\")\n\t\t\t\tif not attachmentname:\n\t\t\t\t\text = mimetypes.guess_extension(part.get_content_type())\n\t\t\t\t\tif not ext:\n\t\t\t\t\t\text = '.bin'\t# use generic if unknown extension\n\t\t\t\t\tattachmentname = 'attachment' + str(partcounter) + ext\n\t\t\t\tattfilename = filename + '_' + attachmentname\n\t\t\t\twrite_to_file(filedirectory, attfilename, part.get_payload(decode=True))\n\twrite_to_file(filedirectory, filename+'.txt', mailstring)", "def _process_incoming_mail(raw_message, recipients):\n recipients = 
[x[1] for x in email.utils.getaddresses([recipients])]\n\n incoming_msg = mail.InboundEmailMessage(raw_message)\n\n if 'X-Google-Appengine-App-Id' in incoming_msg.original:\n raise InvalidIncomingEmailError('Mail sent by App Engine')\n\n # Use the subject to find the issue number.\n # Originally the tag was (issueNNN).\n # Then we changed it to be (issue NNN by WHO).\n # We want to match either of these, and we need to deal with\n # the fact that some mail readers will fold the long subject,\n # turning a single space into \"\\r\\n \".\n # We use \"issue\\s*\" to handle all these forms,\n # and we omit the closing ) to accept both the original and the \"by WHO\" form.\n subject = incoming_msg.subject or ''\n match = re.search(r'\\(issue\\s*(?P<id>\\d+)', subject)\n if match is None:\n raise InvalidIncomingEmailError('No issue id found: %s', subject)\n issue_id = int(match.groupdict()['id'])\n issue = models.Issue.get_by_id(issue_id)\n if issue is None:\n raise InvalidIncomingEmailError('Unknown issue ID: %d' % issue_id)\n sender = email.utils.parseaddr(incoming_msg.sender)[1]\n\n body = None\n for _, payload in incoming_msg.bodies('text/plain'):\n # FIXME(andi): Remove this when issue 2383 is fixed.\n # 8bit encoding results in UnknownEncodingError, see\n # http://code.google.com/p/googleappengine/issues/detail?id=2383\n # As a workaround we try to decode the payload ourselves.\n if payload.encoding == '8bit' and payload.charset:\n body = payload.payload.decode(payload.charset)\n # If neither encoding not charset is set, but payload contains\n # non-ASCII chars we can't use payload.decode() because it returns\n # payload.payload unmodified. The later type cast to db.Text fails\n # with a UnicodeDecodeError then.\n elif payload.encoding is None and payload.charset is None:\n # assume utf-8 but set replace flag to go for sure.\n body = payload.payload.decode('utf-8', 'replace')\n else:\n body = payload.decode()\n break\n if body is None or not body.strip():\n raise InvalidIncomingEmailError('Ignoring empty message.')\n elif len(body) > django_settings.RIETVELD_INCOMING_MAIL_MAX_SIZE:\n # see issue325, truncate huge bodies\n trunc_msg = '... (message truncated)'\n end = django_settings.RIETVELD_INCOMING_MAIL_MAX_SIZE - len(trunc_msg)\n body = body[:end]\n body += trunc_msg\n\n # If the subject is long, this might come wrapped into more than one line.\n subject = ' '.join([x.strip() for x in subject.splitlines()])\n msg = models.Message(issue_key=issue.key, parent=issue.key,\n subject=subject,\n sender=sender,\n recipients=[x for x in recipients],\n date=datetime.datetime.now(),\n text=body,\n draft=False)\n\n # Add sender to reviewers if needed.\n all_emails = [str(x).lower()\n for x in ([issue.owner.email()] +\n issue.reviewers +\n issue.cc +\n issue.collaborator_emails())]\n if sender.lower() not in all_emails:\n query = models.Account.query(models.Account.lower_email == sender.lower())\n account = query.get()\n if account is not None:\n issue.reviewers.append(account.email) # e.g. 
account.email is CamelCase\n else:\n issue.reviewers.append(db.Email(sender))\n\n issue.calculate_updates_for(msg)\n issue.put()\n msg.put()", "def transform(self, email_path):\n mail = open(email_path, 'r')\n content = mail.read(self.max_read_len)\n i = 0\n while not(content[i] == '\\n' and content[i + 1] == '\\n') and i < len(content) - self.ngram:\n i += 1\n header = content[:i]\n # TODO find a smarter way deal with the header-body problem\n body = content[i + 2:]\n if len(body) + len(header) > self.max_read_len:\n body = body[:max(1000, self.max_read_len - len(header))]\n header_set = self.tokenize(header)\n body_set = self.tokenize(body)\n mail.close()\n return (header_set, body_set)", "def process(self, processors) -> MultiLineString:", "def analyze_message(messageIn):\n\n\tcontent = set()\n\tword_count = Counter()\n\temail = None\n\n\t#Iterate through file contents\n\t#for line in messageIn['body']:\n\tline_tokens = messageIn['body'].split()\n\n\tif len(line_tokens) > 1:\n\t\t#Discard links and strip characters. Add it to the list\n\t\tnew_content, new_count = clean_line(line_tokens)\n\t\tcontent = content.union(new_content)\n\t\tword_count += new_count\n\n\t# Add our fields\n\tcontrol.update_one(\n\t\t{\n\t\t\t'_id': messageIn['_id']\n\t\t},\n\t\t{\n\t\t\t\"$set\":{\n\t\t\t\t\"email\":messageIn['headers']['From'],\n\t\t\t\t\"words\":list(content),\n\t\t\t\t\"wordCount\":word_count,\n\t\t\t\t\"year\": messageIn['headers']['Date'].split()[3],\n\t\t\t\t\"version\":1\n\t\t\t}\n\t\t}\n\t)", "def initialization_text_data(self, text, sentences, taggedSentences, rptType='vaers'):\n \n self.text = text\n self.sentences = sentences\n self.reportType = rptType\n \n n = len(sentences)\n locsSentStarts = [-1] * n\n curpt = 0\n for i in range(n):\n pos = text[curpt:].find(sentences[i])\n locsSentStarts[i] = pos + curpt\n curpt = locsSentStarts[i] + len(sentences[i])\n self.sentence_startPos = locsSentStarts\n \n self.taggedSentences = taggedSentences\n self.exposureDate = None\n self.blockout_range = []\n self.clauseZones = []\n \n sent_tags = []\n ##: 'IGNORE' tag breaks the timeline continuity, i.e., stops time impact zone; \n ##: 'SKIP' black out this sentence from time impact zone, and impact zone resumes after this sentence.\n for sentnumber, sentence in enumerate(sentences):\n tags = set([tg[1] for tg in taggedSentences[sentnumber]])\n \n tokens0 = nltk.word_tokenize(sentence.lower()) \n with_who_range = self.extract_standard_summary_pattern(tokens0, sentence)\n if with_who_range:\n r = (with_who_range[0]+self.sentence_startPos[sentnumber], with_who_range[1]+self.sentence_startPos[sentnumber])\n self.blockout_range.append(r)\n \n \n ##: Ignore dates in this sentence since it is about history or family\n if tags.intersection(['History', 'FamilyHistory', 'MedicalHistory']):\n #sent_tags.append('IGNORE')\n sent_tags.append('SKIP')\n continue\n \n ##: tags that breaks time continuity\n if tags.intersection(['Hospitalization']):\n sent_tags.append('IGNORE')\n continue\n \n tokens = set(tokens0)\n ##: Ignore dates in this sentence if it has a 'follow-up'\n if 'follow-up' in tokens or sentence.lower().find('follow up')>=0 or sentence.lower().find('f/u')>=0:\n sent_tags.append('IGNORE')\n continue\n \n ##: Unspecified/unknown date breaks time continuity, except this is a sentence for concomitant, which usually should not stop continuity.\n if tokens.intersection(['unknown', 'unspecified', 'unreported']) and tokens.intersection(['date', 'dates']):\n #if tokens.intersection(['unknown', 
'unspecified']) and tokens.intersection(['date', 'dates']) and not tokens.intersection(['concomitant']):\n #unkSet=tokens.intersection(['unknown', 'unspecified', 'unreported'])\n sent_tags.append('IGNORE')\n continue\n \n ##: tokens that breaks time continuity\n if tokens.intersection(self.token_timeline_breakers):\n sent_tags.append('IGNORE')\n continue\n \n sent_tags.append('NORMAL')\n self.sentence_tags = sent_tags", "def __init__(self, content):\n\t\tself.raw = content\n\t\tself.mail = email.message_from_string(self.raw)\n\n\t\tself.text_content = ''\n\t\tself.html_content = ''\n\t\tself.attachments = []\n\t\tself.cid_map = {}\n\t\tself.parse()\n\t\tself.set_content_and_type()\n\t\tself.set_subject()\n\t\tself.set_from()\n\t\tself.message_id = self.mail.get('Message-ID')\n\n\n\t\tself.unique_id = get_unique_id(self.mail)\n\n\t\t# gmail mailing-list compatibility\n\t\t# use X-Original-Sender if available, as gmail sometimes modifies the 'From'\n\t\t# _from_email = self.mail.get(\"X-Original-From\") or self.mail[\"From\"]\n\t\t# \n\t\t# self.from_email = extract_email_id(_from_email)\n\t\t# if self.from_email:\n\t\t# \tself.from_email = self.from_email.lower()\n\t\t# \n\t\t# #self.from_real_name = email.utils.parseaddr(_from_email)[0]\n\t\t# \n\t\t# _from_real_name = decode_header(email.utils.parseaddr(_from_email)[0])\n\t\t# self.from_real_name = decode_header(email.utils.parseaddr(_from_email)[0])[0][0] or \"\"\n\t\t# \n\t\t# try:\n\t\t# \tif _from_real_name[0][1]:\n\t\t# \t\tself.from_real_name = self.from_real_name.decode(_from_real_name[0][1])\n\t\t# \telse:\n\t\t# \t\t# assume that the encoding is utf-8\n\t\t# \t\tself.from_real_name = self.from_real_name.decode(\"utf-8\")\n\t\t# except UnicodeDecodeError,e:\n\t\t# \tprint e\n\t\t# \tpass\n\n\t\t#self.from_real_name = email.Header.decode_header(email.utils.parseaddr(_from_email)[0])[0][0]\n\t\tself.To = self.mail.get(\"To\")\n\t\tif self.To:\n\t\t\tto = u\"\"\n\t\t\tfor name, encoding in decode_header(self.To):\n\t\t\t\tif encoding:\n\t\t\t\t\tto += name.decode(encoding)\n\t\t\t\telse:\n\t\t\t\t\tto += name\n\t\t\tself.To = to.lower()\n\t\tself.CC = self.mail.get(\"CC\")\n\t\tif self.CC:\n\t\t\tself.CC = self.CC.lower()\n\t\tif self.mail[\"Date\"]:\n\t\t\ttry:\n\t\t\t\tutc = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail[\"Date\"]))\n\t\t\t\tutc_dt = datetime.datetime.utcfromtimestamp(utc)\n\t\t\t\tself.date = convert_utc_to_user_timezone(utc_dt).strftime('%Y-%m-%d %H:%M:%S')\n\t\t\texcept:\n\t\t\t\tself.date = now()\n\t\telse:\n\t\t\tself.date = now()\n\t\tif self.date > now():\n\t\t\tself.date = now()", "def parse_layout(layout, mytext, line_list):\n # mytext = []\n line = \"\"\n for lt_obj in layout:\n # print(lt_obj.__class__.__name__)\n # print(lt_obj.bbox)\n if isinstance(lt_obj, LTTextLine):\n mytext.append(lt_obj) # .get_text())\n # print(lt_obj.get_text())\n # print(text)\n elif isinstance(lt_obj, LTLine):\n line_list.append(lt_obj)\n elif isinstance(lt_obj, LTCurve):\n bbox = lt_obj.bbox\n if bbox[1] == bbox[3]:\n line_list.append(lt_obj)\n elif isinstance(lt_obj, LTTextBox): # or isinstance(lt_obj, LTTextLine):\n # print(lt_obj.get_text())\n mytext, line_list = parse_layout(lt_obj, mytext, line_list) # Recursive\n\n # elif isinstance(lt_obj, LTAnno):\n # print(line)\n # print(str(lt_obj._text) + 'xx')\n # if lt_obj._text == '\\n':\n # text.append(line)\n # print(str(type(lt_obj)) + \" : \" + str(lt_obj.get_text()))\n # print(dir(lt_obj))\n # print(mytext)\n return mytext, line_list", "def 
_get_data_from_mail(self) -> None:\n current_email = self.outlook.ActiveInspector()\n cap = current_email.Caption\n try:\n self.keyword = next(KEYWORDS[key] for key in KEYWORDS if key in cap)\n except StopIteration:\n raise RuntimeError('This e-mail is not standard.')\n\n self.mail_text = current_email.CurrentItem.HTMLBody\n # print(self.mail_text)\n if not self.mail_text:\n raise RuntimeError('Empty e-mail.')\n event_key = EVENT_KEYS.get(self.keyword, False)\n\n parsed_html = BeautifulSoup(self.mail_text, features=\"lxml\")\n rows = parsed_html.find_all('tr')\n headers = parsed_html.find_all('h1')\n\n def table_find(text: str) -> str:\n try:\n row = next(filter(lambda x: text == x.td.string, rows))\n except StopIteration:\n row = next(filter(lambda x: text == x.td.p.text, rows))\n return row.find_all('td')[1].text\n\n def header_find(text: str) -> str:\n header = next(filter(lambda x: x.text.startswith(text), headers))\n return header.text\n\n new_object = PointOfSale()\n # Data from the table in mail body.\n new_object.object_code = table_find('Код')\n new_object.object_SAP_code = table_find('Код ТТ SAP')\n # self.object_name = table_find('Наименование')\n # self.object_address = table_find('Адрес')\n new_object.ter_dir_name = table_find('Оперативный менеджер ТТ')\n # Data from h1 tags under the table (for closing)\n if self.keyword == 'closing':\n cms = header_find('ЦМС')\n new_object.successor_full_name = cms[:cms.index(' принимает')]\n new_object.successor_name = \\\n re.match(r'ЦМС \\d+ (.*)', new_object.successor_full_name).group(1)\n if self.keyword == 'change':\n new_object.operation_mode_new = table_find('Режим работы')\n if event_key:\n date = re.search(\n r'\\d{1,2}.\\d{2}.\\d{2,4}',\n header_find(event_key)\n ).group(0)\n self.event_date = \\\n max(datetime.strptime(date, '%d.%m.%Y').date(), datetime.now().date())\n else:\n self.event_date = datetime.now().date()\n\n self.objects.append(new_object)", "def parse_maillog(logline):\n\n values = logline.split(None)\n postfix_process_name = values[4]\n\n if 'postfix/pipe' in postfix_process_name:\n # This means that the mail was delivered, most likely, so lets send it\n # over to the get_recipient_stats function to be parsed out\n print \"FOUND [pipe]\"\n get_pipe_stats(values)\n\n elif 'postfix/postscreen' in postfix_process_name:\n print \"FOUND [postscreen]\"\n get_postscreen_stats(values)\n\n elif 'postfix/smtp[' in postfix_process_name:\n print \"FOUND [smtp]\"\n get_smtp_stats(values)\n\n elif 'postfix/smtpd' in postfix_process_name:\n print \"FOUND [smtpD]\"\n get_smtpd_stats(values)\n\n elif 'postfix/qmgr' in postfix_process_name:\n print \"FOUND [qmgr]\"\n get_qmgr_stats(values)\n\n elif 'postfix/local' in postfix_process_name:\n print \"FOUND [local]\"\n get_local_stats(values)\n\n elif 'postfix/cleanup' in postfix_process_name:\n print \"FOUND [cleanup]\"\n get_cleanup_stats(values)\n\n elif 'amavis[' in postfix_process_name:\n print \"FOUND [amavis]\"\n get_amavis_stats(values)", "def process_lines(line_queue, result_queue, sim_info, analysis_info, args, merged_reads):\n\t\n\twhile True:\n\t\tline = line_queue.get()\n\t\t\n\t\t# if we're done, stop\n\t\tif line is None:\n\t\t\tresult_queue.put(None)\n\t\t\tbreak\n\t\t# skip header lines\n\t\telif line[0] == '@':\n\t\t\tcontinue\n\t\t# process this line\n\t\telse:\n\t\t\tresult = score_read(line, sim_info, analysis_info, args, merged_reads)\n\t\t\tresult_queue.put(result)", "def display_fields(self):\r\n\r\n field_text = self.show_fields()\r\n field_text_list = 
field_text.split(EOL)[0:-1]\r\n\r\n def fld_format (x_temp):\r\n\r\n x_temp = x_temp.split(COLON)[0], x_temp.split(COLON)[1]\r\n\r\n \"\"\"formats output of the list of search results\"\"\"\r\n\r\n if not isinstance(x_temp[1],str):\r\n shown_indexes = rangelist.range_find([int(Index(a_temp))\r\n for a_temp in x_temp[1]],reduce=True)\r\n else:\r\n shown_indexes = x_temp[1]\r\n\r\n if len(shown_indexes) < 20:\r\n return (abridge(x_temp[0]).replace(VERTLINE,SLASH)\r\n +VERTLINE\r\n +shown_indexes)\r\n\r\n\r\n returnlist = []\r\n sp_temp = rangelist.split_up_range(shown_indexes)\r\n\r\n\r\n returnlist.append(x_temp[0].replace(VERTLINE,SLASH)[0:min([60,len(x_temp[0])])]\r\n +VERTLINE+sp_temp[0])\r\n for s_temp in sp_temp[1:]:\r\n returnlist.append(VERTLINE+s_temp)\r\n\r\n return returnlist\r\n\r\n show_list(field_text_list,\r\n alerts.FIELDS[3:],0,40,\r\n func=fld_format,\r\n present=True,\r\n display=display)", "def _read_info(self):\n my_filelines = self.file_lines\n info = dict()\n\n for i, line in enumerate(my_filelines):\n if line.startswith(\"VEHICLE\"):\n vehicle_pro_start = i + 2\n elif line.startswith(\"CUSTOMER\"):\n customer_pro_start = i + 3\n\n elif line.startswith(\"NUMBER\"):\n splited = line.split(' ')\n info[splited[0]] = 0\n info[splited[-1]] = 0\n return info, (vehicle_pro_start, customer_pro_start)", "def parse_file(self):\n for num, line in enumerate(self._text):\n if \"CRYSTAL STRUCTURE SOLUTION\" in line:\n line = line.strip().strip('+').strip()\n if 'SHELXTL' in line:\n self.version = 'SHELXT ' + line.split()[-1]\n if line.strip().startswith('R1 Rweak Alpha'):\n for n in range(100):\n if not self._text[num + 1 + n]:\n break\n if self._text[num + 1]:\n self.solutions[self._text[num + 1 + n][58:76].strip()] = self._text[num + 1 + n][37:51].strip()", "def get_personal_info(line_objs):\n result = []\n start = True\n for line in line_objs:\n line_label = line.get('label')\n line_category = line.get('category')\n if line_label == 'title':\n if line_category == 'personal_info':\n start = True\n continue\n else:\n start = False\n if start:\n result.append(line)\n try:\n max_height = max([line.get('wh')[1] for line in result])\n except:\n max_height = max([line.get('wh')[1] for line in line_objs])\n track_candicate_name = False\n for line in result:\n height = line.get('wh')[1]\n if height == max_height and not track_candicate_name:\n for word in profile_words.keys():\n if word in line.get(\"text\"):\n continue\n line['label'] = 'candicate_name'\n track_candicate_name = True\n else:\n line['label'] = 'description'\n line['category'] = 'personal_info'\n return result", "def ProcessLine(line, rules, processing, previous_line_data):\n line_data = {'line':line, 'line_offset':processing['offset_processed']}\n \n # Update with always-included data, like glob keys, and the component\n line_data.update(processing['data'])\n \n # Test if this line is multi-line (positive test)\n is_multi_line = False\n for rule in rules:\n if rule.get('multi line regex test', False):\n if re.match(rule['regex'], line):\n is_multi_line = True\n break\n # Negative regex test\n for rule in rules:\n if rule.get('multi line regex not', False):\n if re.match(rule['regex'], line):\n is_multi_line = True\n break\n \n # If this is multi_line and we have a real previous line to embed this data in\n if is_multi_line and previous_line_data != None:\n #print 'Multiline: %s' % line\n if 'multiline' not in previous_line_data:\n previous_line_data['multiline'] = []\n \n previous_line_data['multiline'].append(line)\n\n\n 
# Only process rules on first lines (not multi lines), and return the line_data to be the next line's previous_line_data\n if not is_multi_line:\n #print line\n \n # Start with: We haven't found a match yet\n match_found = False\n \n for item in rules:\n # Skip the multi-line regex test/not rules\n if item.get('multi line regex test', False) or item.get('multi line regex not', False):\n continue\n \n # Break out our terms for this rule item\n terms = re.findall('%\\((.*?)\\)s', item['regex'])\n #print item['regex']\n #print terms\n \n regex = item['regex']\n \n # Pre-processing step, to remove any conflicting characters with the rest of the regex which need to be escaped/sanitized\n for term in terms:\n regex = regex.replace('%%(%s)s' % term, 'MATCHMATCHMATCH')\n \n regex = SanitizeRegex(regex)\n regex = regex.replace('MATCHMATCHMATCH', '(.*?)')\n \n #print '--- %s' % item['id']\n #print regex\n #print line\n \n regex_result = re.findall(regex, line)\n #print regex_result\n if regex_result:\n \n # Python does something stupid with multiple variables, so pull them out of the embedded tuple it adds to the list\n if type(regex_result[0]) == tuple:\n regex_result = regex_result[0]\n \n for count in range(0, len(terms)):\n #print '%s: %s: %s' % (count, terms[count], regex_result[count])\n line_data[terms[count]] = regex_result[count]\n \n #print regex\n #print 'MATCHED! %s' % regex\n #print regex_result\n \n match_found = True\n \n # Save the line match ID, so we can reference it for markup/state information\n line_data['__rule_id__'] = item['id']\n \n break\n \n return line_data\n \n # Else, this is multi-line, so return it to continue to be the next line's previous_line_data\n else:\n #TODO(g): Save this multi-line data every time? Otherwise when does it get saved out?\n pass\n \n return previous_line_data", "def process_log_line(url_body, mark_word):\n\n global submit_result_re\n global show_result_re\n global remap_pairs\n\n show_line = False\n\n # See if line contains question results or \"show results\"\n match_obj = submit_result_re.search(url_body)\n if match_obj == None:\n match_obj = show_result_re.search(url_body)\n show_line = True\n\n # If no match was obtained, bomb out\n if match_obj == None:\n raise ValueError('Impossible URL in process_log_line:', url_body)\n\n form_name = match_obj.group('formname')\n answer_str = match_obj.group('answerstr')\n\n if not(show_line) and ((len(answer_str) % 3) != 0):\n raise ValueError('URL with incorrect format:', url_body)\n \n url = url_body.split('?')[0].replace('_bogus.html', '')\n # Get rid of the first slash\n if url[0] == '/':\n url = url[1:]\n item_name = url + form_name\n item_name = item_name.replace('/', '_')\n item_name = item_name.replace('_es.html', '_')\n item_name = item_name.replace('_en.html', '_')\n item_name = item_name.replace('.html', '_')\n if item_name[0] == '.':\n item_name = item_name[1:]\n\n # See if the question belongs to a block.\n block_name = next((y for (x, y) in remap_pairs \n if x.search(item_name) != None), None)\n\n # If it is a show line, return the \"show\" and the block name\n if show_line:\n if block_name == None:\n block_name = item_name\n return [('show', block_name)]\n\n # It is not a show line but a set of answers to (potentially) various\n # questions \n \n # create list with groups of three characters from answer_str\n answers = []\n for index in map(lambda x: 3 * x, range(len(answer_str)/3)):\n answers.append(answer_str[index:index+3])\n\n # Process each triplet in the list [abc] where\n # a is 
the question number within the form\n # b is the answer marked (could be zero)\n # c is C/I/0 if answer is correct, incorrect or not answered\n result = []\n for item in answers:\n # Each answer is a different item\n\n # The id needs the question index if block_name is None (no compaction)\n if block_name == None:\n question_id = item_name + '_' + item[0]\n else:\n question_id = block_name\n\n if item[2] == 'C':\n outcome = 'correct'\n elif item[2] == 'I':\n outcome = 'incorrect'\n elif item[2] == '0':\n outcome = 'blank'\n else:\n # There should be anything else other than C, I, or 0\n raise ValueError('Incorrect value in answer string:', outcome)\n result.append((outcome, question_id))\n # End of for each item in answers\n return result", "def parse_lines(lines):\n image_ids = []\n cleaned_captions = []\n\n # QUESTION 1.1\n\n for line in lines:\n # first we split the image id from caption text based on \\t\n id = line.split('\\t')[0]\n # then we extract remove .jpg#x part from image id (where x = 1 to 5)\n id = id.split('.')[0]\n # finally we extract raw text caption\n raw_caption = line.split('\\t')[1]\n # and forward to other function for cleaning the text\n caption = clean_caption(raw_caption)\n\n image_ids.append(id)\n cleaned_captions.append(caption)\n\n return image_ids, cleaned_captions", "def deal_lines(self, lines, conf):\n if lines == ['']:\n print \"NO new %s commit!\" % conf\n else:\n for line in lines:\n if re.search('\\d+ files? changed', line) is None:\n pos = line.find(' ')\n if pos != -1:\n try:\n parts = line.split(' ', 2)\n commit_id = parts[0]\n self.current_commit = commit_id\n stamp = int(parts[1])\n ti = datetime.datetime.fromtimestamp(float(stamp))\n s_time = datetime.datetime.fromtimestamp(float(0))\n if self.start_date == s_time:\n self.start_date = ti\n elif self.start_date > ti:\n self.start_date = ti\n author, mail = parts[2].split('<', 1)\n message = mail.split('> ', 1)[1]\n mail = mail.split('>', 1)[0]\n if re.search(': ', message) is not None:\n messagetype = message.split(': ', 1)[0]\n if messagetype not in CLASSIFICATION:\n messagetype = 'OTR'\n else:\n messagetype = 'OTR'\n if commit_id not in self.commit_dictionary:\n self.commit_dictionary[commit_id]\\\n = [commit_id, mail,\n stamp, messagetype,\n messagetype, 0, 0, 0, 0]\n # [files, inserted, deleted, total_lines]\n if mail not in self.author_dictionary:\n self.author_dictionary[mail] = [author,\n mail, 0, 0,\n 0, 0, 1,\n stamp]\n # [files,inserted,deleted,total_lines,commit,stamp]\n else:\n self.author_dictionary[mail][6] += 1\n if stamp > self.author_dictionary[mail][7]:\n self.author_dictionary[mail][7] = stamp\n self.total_patches += 1\n except:\n print 'Warning: unexpected line \"%s\"' % line\n else:\n if conf == 'no_merges':\n try:\n commit_id = self.current_commit\n numbers = self.getstatsummarycounts(line)\n if len(numbers) == 3:\n (files, inserted, deleted) = \\\n map(lambda el: int(el), numbers)\n total_lines = inserted - deleted\n self.commit_dictionary[commit_id][5] = files\n self.commit_dictionary[commit_id][6] = inserted\n self.commit_dictionary[commit_id][7] = deleted\n self.commit_dictionary[commit_id][8] = total_lines\n self.author_dictionary[mail][2] += files\n self.author_dictionary[mail][3] += inserted\n self.author_dictionary[mail][4] += deleted\n self.author_dictionary[mail][5] += total_lines\n self.total_lines_inserted += inserted\n self.total_lines_deleted += deleted\n self.total_lines += total_lines\n self.current_commit = None\n except:\n print 'Warning: unexpected line 
\"%s\"' % line", "def _add_to_fields(self, tag, data):\n # Addressee data\n if 'Recipient' == tag:\n names = data.split()\n for name in names:\n self.__fields.addressee_line['all_names'].append(name) \n\n # Probable box data\n # Strip out anything that's not a number since we might get some other\n # data inside here also. If the box # can be a subnumber (BOX 102-A) then\n # we'll end up putting everything in the # only.\n if 'USPSBoxGroupID' == tag or 'USPSBoxGroupType' == tag or \\\n 'USPSBoxID' == tag or 'USPSBoxType' == tag or \\\n 'OccupancyType' == tag or 'OccupancyIdentifier' == tag or \\\n 'SubaddressType' == tag or 'SubaddressIdentifier' == tag:\n box = re.search('\\d+', data)\n if box is not None:\n self.__fields.probable_box.append(box.group(0)) \n\n # Street data\n # Discarding street number prefix and suffix for now\n if 'AddressNumber' == tag:\n self.__fields.street_line['number'].append(data) \n if 'StreetName' == tag:\n self.__fields.street_line['street_name'].append(data) \n\n # City data\n if 'PlaceName' == tag:\n self.__fields.city_line['city'].append(data) \n if 'StateName' == tag:\n self.__fields.city_line['state'].append(data) \n if 'ZipCode' == tag:\n self.__fields.city_line['zip_code'].append(data)", "def real_process(raw):\n\n prod = product.TextProduct(raw)\n pil = prod.afos[:3]\n wfo = prod.source[1:]\n # sigh, can't use originating center for the route\n if (pil == \"OEP\"):\n wfo = prod.afos[3:]\n\n #raw = raw.replace(\"'\", \"\\\\'\")\n sqlraw = raw.replace(\"\\015\\015\\012\", \"\\n\").replace(\"\\000\", \"\").strip()\n\n # FTM sometimes have 'garbage' characters included, get em out\n #if (pil == \"FTM\"):\n # sqlraw = re.sub(\"[^\\n\\ra-zA-Z0-9:\\.,\\s\\$\\*]\", \"\", sqlraw)\n\n # Always insert the product into the text archive database\n product_id = prod.get_product_id()\n sql = \"\"\"INSERT into text_products(product, product_id) values (%s,%s)\"\"\"\n myargs = (sqlraw, product_id)\n if (len(prod.segments) > 0 and prod.segments[0].sbw):\n giswkt = 'SRID=4326;%s' % (MultiPolygon([prod.segments[0].sbw]).wkt,)\n sql = \"\"\"INSERT into text_products(product, product_id, geom) values (%s,%s,%s)\"\"\" \n myargs = (sqlraw, product_id, giswkt)\n deffer = POSTGIS.runOperation(sql, myargs)\n deffer.addErrback( common.email_error, sqlraw)\n myurl = \"%s?pid=%s\" % (config.get('urls', 'product'), product_id)\n\n xtra = {\n \"product_id\": product_id,\n }\n\n # Just send with optional headline to rooms...\n if SIMPLE_PRODUCTS.__contains__(pil):\n xtra['channels'] = wfo\n if pil in NEW_ROUTING:\n xtra['channels'] = prod.afos\n prodtxt = \"(%s)\" % (pil,)\n if reference.prodDefinitions.has_key(pil):\n prodtxt = reference.prodDefinitions[pil]\n\n mess = \"%s: %s issues %s %s\" % (wfo, wfo, prodtxt, myurl)\n htmlmess = \"%s issues <a href=\\\"%s\\\">%s</a> \" % (centertext.get(wfo,wfo), myurl, prodtxt)\n if (not [\"HWO\",\"NOW\",\"ZFP\"].__contains__(pil) and \n len(prod.segments) > 0 and \n len(prod.segments[0].headlines) > 0 and \n len(prod.segments[0].headlines[0]) < 200 ):\n htmlmess += \"... 
%s ...\" % (prod.segments[0].headlines[0],)\n\n jabber.sendMessage(mess, htmlmess, xtra)\n\n channels = [wfo,]\n if pil in NEW_ROUTING:\n channels = [prod.afos,]\n # TODO: remove manual hack\n if prod.afos == 'RFDBIS':\n channels = ['BIS',]\n # Also send message to any 'subscribing WFO chatrooms'\n for key in routes.keys():\n if (re.match(key, prod.afos)):\n for wfo2 in routes[key]:\n mess = \"%s: %s issues %s %s\" % \\\n (wfo2, wfo, prodtxt, myurl)\n jabber.sendMessage(mess, htmlmess, xtra)\n channels.append( wfo2 )\n\n twt = prodtxt\n url = myurl\n common.tweet(channels, twt, url)\n if prod.afos == \"PNSARX\":\n snowfall_pns(prod)\n # We are done for this product\n return\n\n\n # Now, lets look at segments ?\n if (pil == \"RVF\"):\n for seg in prod.segments:\n tokens = re.findall(\"\\.E ([A-Z0-9]{5}) \", seg.raw)\n if (len(tokens) == 0):\n print 'Whoa, did not find NWSLI?', seg\n return\n hsas = re.findall(\"HSA:([A-Z]{3}) \", seg.raw)\n prodtxt = reference.prodDefinitions[pil]\n mess = \"%s: %s issues %s\" % \\\n (wfo, wfo, prodtxt)\n htmlmess = \"%s issues <a href=\\\"%s\\\">%s</a> for \" \\\n % (wfo, myurl, prodtxt)\n usednwsli = {}\n hsa_cnt = -1\n rivers = {}\n for nwsli in tokens:\n if usednwsli.has_key(nwsli):\n continue\n usednwsli[nwsli] = 1\n hsa_cnt += 1\n if (nwsli_dict.has_key(nwsli)):\n rname = nwsli_dict[nwsli]['rname']\n r = nwsli_dict[nwsli]['river']\n else:\n rname = \"((%s))\" % (nwsli,)\n r = \"Unknown River\"\n if not rivers.has_key(r):\n rivers[r] = \"<br/>%s \" % (r,)\n if len(hsas) > hsa_cnt and \\\n reference.wfo_dict.has_key( hsas[hsa_cnt] ):\n uri = AHPS_TEMPLATE[ reference.wfo_dict[hsas[hsa_cnt]]['region'] ] %\\\n (hsas[hsa_cnt].lower(), nwsli.lower() ) \n rivers[r] += \"<a href=\\\"%s\\\">%s</a> (%s), \" % (uri, rname, nwsli)\n else:\n rivers[r] += \"%s (%s), \" % (rname, nwsli)\n for r in rivers.keys():\n htmlmess += \" %s\" % (rivers[r][:-2],)\n jabber.sendMessage(mess[:-1] +\" \"+ myurl, htmlmess[:-1], xtra)\n continue\n\n# PUBLIC ADVISORY NUMBER 10 FOR REMNANTS OF BARRY\n# TROPICAL DEPRESSION BARRY ADVISORY NUMBER 5\n# TROPICAL STORM BARRY INTERMEDIATE ADVISORY NUMBER 2A\n\n if (pil == \"TCM\" or pil == \"TCP\" or pil == \"TCD\"):\n mess = \"%s: %s issues %s %s\" % (wfo, wfo, pil, myurl)\n prodtxt = \"(%s)\" % (pil,)\n if reference.prodDefinitions.has_key(pil):\n prodtxt = reference.prodDefinitions[pil]\n htmlmess = \"%s issues <a href=\\\"%s\\\">%s</a> \" % (wfo, myurl, prodtxt)\n jabber.sendMessage(mess, htmlmess, xtra)\n \n common.tweet([wfo], prodtxt, myurl)\n\n\n for key in routes.keys():\n if (re.match(key, prod.afos)):\n channels = []\n for wfo2 in routes[key]:\n mess = \"%s: %s %s\" % \\\n (wfo2, prod.afos, myurl)\n htmlmess = \"<a href=\\\"%s\\\">%s</a>\" % (myurl, prodtxt)\n tokens = re.findall(\"(.*) (DISCUSSION|INTERMEDIATE ADVISORY|FORECAST/ADVISORY|ADVISORY|MEMEME) NUMBER\\s+([0-9]+)\", raw.replace(\"PUBLIC ADVISORY\", \"ZZZ MEMEME\") )\n if (len(tokens) > 0):\n tt = tokens[0][0]\n what = tokens[0][1]\n tnum = tokens[0][2]\n if (tokens[0][1] == \"MEMEME\"):\n tokens2 = re.findall(\"(PUBLIC ADVISORY) NUMBER\\s+([0-9]+) FOR (.*)\", raw)\n what = tokens2[0][0]\n tt = tokens2[0][2]\n mess = \"%s: %s issues %s #%s for %s %s\" % (wfo2, centertext.get(wfo, wfo), what, tnum, tt, myurl)\n htmlmess = \"%s issues <a href=\\\"%s\\\">%s #%s</a> for %s\" % ( centertext.get(wfo, wfo), myurl, what, tnum, tt)\n #print htmlmess, mess\n jabber.sendMessage(mess, htmlmess, xtra)\n channels.append( wfo2 )\n twt = \"%s issues %s %s for %s\" % (centertext.get(wfo, 
wfo), what, tnum, tt)\n common.tweet(channels, twt, myurl)\n\n\n for seg in prod.segments:\n # The segment needs to have ugc codes\n if (len(seg.ugcs) == 0):\n continue\n # If the product has VTEC, it is handled by the vtec ingestor\n if (len(seg.vtec) > 0 and ['MWS','HLS'].__contains__(pil)):\n log.msg(\"VTEC FOUND!, skipping\")\n continue\n\n # If the product has HVTEC, it is handled by other ingestor too\n if (len(seg.hvtec) > 0 and ['FLW','FFA','FLS'].__contains__(pil)):\n log.msg(\"HVTEC FOUND!, skipping\")\n continue\n\n counties = countyText(seg.ugcs)\n if (counties.strip() == \"\"):\n counties = \"entire area\"\n expire = \"\"\n if seg.ugcexpire is not None:\n if prod.z:\n expire = \"till \"+ (seg.ugcexpire - datetime.timedelta(hours= reference.offsets[prod.z] )).strftime(\"%-I:%M %p \")+ prod.z\n\n prodtxt = \"(%s)\" % (pil,)\n if reference.prodDefinitions.has_key(pil):\n prodtxt = reference.prodDefinitions[pil]\n mess = \"%s: %s issues %s for %s %s %s\" % \\\n (wfo, wfo, prodtxt, counties, expire, myurl)\n htmlmess = \"%s issues <a href=\\\"%s\\\">%s</a> for %s %s\" % (wfo, myurl, prodtxt, counties, expire)\n jabber.sendMessage(mess, htmlmess, xtra)\n twt = \"%s for %s %s\" % (prodtxt, counties, expire)\n common.tweet([wfo,], twt, myurl)\n\n# PUBLIC ADVISORY NUMBER 10 FOR REMNANTS OF BARRY\n# TROPICAL DEPRESSION BARRY ADVISORY NUMBER 5\n# TROPICAL STORM BARRY INTERMEDIATE ADVISORY NUMBER 2A\n\n if (pil == \"TCM\" or pil == \"TCP\" or pil == \"TCD\"):\n mess = \"%s: %s issues %s %s\" % (wfo, wfo, pil, myurl)\n prodtxt = \"(%s)\" % (pil,)\n if reference.prodDefinitions.has_key(pil):\n prodtxt = reference.prodDefinitions[pil]\n htmlmess = \"%s issues <a href=\\\"%s\\\">%s</a> \" % (wfo, myurl, prodtxt)\n jabber.sendMessage(mess, htmlmess, xtra)\n common.tweet([wfo,], prodtxt, myurl)\n\n\n\n for key in routes.keys():\n if (re.match(key, prod.afos)):\n channels = []\n for wfo2 in routes[key]:\n mess = \"%s: %s %s\" % \\\n (wfo2, prod.afos, myurl)\n htmlmess = \"<a href=\\\"%s\\\">%s</a>\" % (myurl, prodtxt)\n tokens = re.findall(\"(.*) (DISCUSSION|INTERMEDIATE ADVISORY|FORECAST/ADVISORY|ADVISORY|MEMEME) NUMBER\\s+([0-9]+)\", raw.replace(\"PUBLIC ADVISORY\", \"ZZZ MEMEME\") )\n if (len(tokens) > 0):\n tt = tokens[0][0]\n what = tokens[0][1]\n tnum = tokens[0][2]\n if (tokens[0][1] == \"MEMEME\"):\n tokens2 = re.findall(\"(PUBLIC ADVISORY) NUMBER\\s+([0-9]+) FOR (.*)\", raw)\n what = tokens2[0][0]\n tt = tokens2[0][2]\n mess = \"%s: %s issues %s #%s for %s %s\" % (wfo2, centertext.get(wfo, wfo), what, tnum, tt, myurl)\n htmlmess = \"%s issues <a href=\\\"%s\\\">%s #%s</a> for %s\" % ( centertext.get(wfo, wfo), myurl, what, tnum, tt)\n #print htmlmess, mess\n jabber.sendMessage(mess, htmlmess, xtra)\n channels.append( wfo2 )\n twt = \"%s issues %s %s for %s\" % (centertext.get(wfo, wfo), what, tnum, tt)\n common.tweet(channels, twt, myurl)", "def _collect_lines( self, diffs ):\n\n\t\tfromlist, tolist, flaglist = [], [], []\n\t\t# pull from/to data and flags from mdiff style iterator\n\t\tfor fromdata, todata, flag in diffs:\n\t\t\ttry:\n\t\t\t\t# store HTML markup of the lines into the lists\n\t\t\t\tfromlist.append( self._format_line( 0, flag, *fromdata ) )\n\t\t\t\ttolist.append( self._format_line( 1, flag, *todata ) )\n\t\t\texcept TypeError:\n\t\t\t\t# exceptions occur for lines where context separators go\n\t\t\t\tfromlist.append( None )\n\t\t\t\ttolist.append( None )\n\t\t\tflaglist.append( flag )\n\t\treturn fromlist, tolist, flaglist", "def handle_data(self, text):\n if 
self.bankacctfrom:\n if self.bankid:\n self.compte['banque'] = text.strip()\n self.bankid = False\n if self.branchid:\n self.compte['guichet'] = text.strip()\n self.branchid = False\n if self.acctid:\n self.compte['compte'] = text.strip()\n self.acctid = False\n if self.banktranlist:\n if self.stmttrn:\n if self.dtposted:\n self.ecriture_tmp['date'] = datetime.strptime(text.strip(), \"%Y%m%d\")\n self.dtposted = False\n if self.trnamt:\n self.ecriture_tmp['montant'] = locale.atof(text.strip())\n self.trnamt = False\n if self.trntype:\n self.ecriture_tmp['type'] = text.strip()\n self.trntype = False\n if self.name:\n self.ecriture_tmp['name'] = text.strip()\n self.name = False\n if self.memo:\n self.ecriture_tmp['memo'] = text.strip()\n self.memo = False", "def format_ocr_text(self, page):\n \n #read out of the text file that tesseract made\n ocr_text = open(self.ocr_text, 'r')\n \n # write into this file\n djvu_text = open( self.djvu_text, 'w' )\n \n text = \"(page 0 0 1 1\\n\"\n \n self.out_text.write('\\n## Page %d ###\\n\\n' % page )\n \n for line in ocr_text:\n \n #write to the human readable file\n self.out_text.write(line)\n \n # add each line of text\n # escaping \" to \\\" as we go\n text += '(line 0 0 1 1 \"%s\")\\n' % line.replace('\"', r'\\\"').strip()\n \n text += \")\\n\"\n \n djvu_text.write( text )\n \n ocr_text.close()\n djvu_text.close()", "def process_lines(self, lines):\n line_index = 0\n n_lines = len(lines)\n while line_index < n_lines:\n if lines[line_index].startswith(\"HIERARCHY\"):\n line_index = self._read_skeleton(lines, line_index, n_lines)\n if lines[line_index].startswith(\"MOTION\"):\n self._read_frametime(lines, line_index+2)\n line_index = self._read_frames(lines, line_index+3, n_lines)\n else:\n line_index += 1", "def treat_new_line(self,text):\n text=text.replace('.\\n','. 
')\n text=re.sub(r'(\\n\\s*)+\\n+', '\\n\\n',text )\n \n lw=text.split('\\n\\n')\n lw=[c for c in lw if c.replace(' ','')!='']\n \n for i in range(1,len(lw)):\n try:\n\n el=lw[i]\n if len(el)>=1:\n try:\n first_w=el.split()[0]\n except:\n first_w=el\n first_l=first_w[0]\n if first_l.isupper() :\n if len(lw[i-1])>0 and lw[i-1].replace(' ','') !='':\n if lw[i-1].replace(' ','')[-1] not in [\":\",'.',\"-\",'/',\"'\",\";\"]:\n prec=lw[i-1].split(\".\")[-1]\n merge=(prec+' '+lw[i]).split()\n dic=dict(nltk.tag.pos_tag(merge))\n proper_noun=dic[first_w]=='NNP'\n if not proper_noun:\n if not \".\" in lw[i-1]:\n lw[i-1]=lw[i-1]+\".\\n\\n \"\n else:\n lw[i-1]=lw[i-1][:-1]+\".\\n\\n \"\n else:\n lw[i-1]+=' '\n\n\n elif first_l.islower():\n if len(lw[i-1])>0 and lw[i-1][-1].replace(' ','')!='':\n\n if lw[i-1][-1].replace(' ','')[-1]!='-':\n lw[i-1]+=\"\"\n else:\n\n ltemp_prev=lw[i-1].split(' ')\n ltemp_next=lw[i].split(' ')\n motprev=ltemp_prev[-1][:-1]\n motnext=lw[i].split(' ')[0]\n if len((motprev+' '+motnext).split())==2:\n\n if self.english_voc.check(motprev) and self.english_voc.check(motnext) and not self.english_voc.check(\"\".join([motprev,motnext])) :\n newmot=\" \".join([motprev,motnext])\n else:\n newmot=\"\".join([motprev,motnext])\n ltemp_prev[-1]=newmot\n ltemp_next[0]=\"\"\n lw[i-1]=\" \".join(ltemp_prev)\n lw[i]=\" \".join(ltemp_next)\n else:\n lw[i-1]+=\"\\n\\n\"\n \n except:\n print('Error occurs, the reader may not be suitable for your pdf files')\n \n \n text=\"\".join(lw)\n \n lw=text.split('\\n')\n lw=[c for c in lw if c.replace(' ','')!='']\n for i in range(1,len(lw)):\n try:\n el=lw[i]\n if len(el)>=1:\n try:\n first_w=el.split()[0]\n except:\n first_w=el\n first_l=first_w[0]\n if first_l.isupper() :\n if len(lw[i-1])>0 and lw[i-1].replace(' ','')!='':\n if lw[i-1].replace(' ','')[-1] not in [\":\",'.',\"-\",'/',\"'\",\";\"]:\n prec=lw[i-1].split(\".\")[-1]\n merge=(prec+' '+lw[i]).split()\n dic=dict(nltk.tag.pos_tag(merge))\n proper_noun=dic[first_w]=='NNP'\n if not proper_noun:\n if not \".\" in lw[i-1]:\n lw[i-1]=lw[i-1]+\".\\n\\n \"\n else:\n lw[i-1]=lw[i-1][:-1]+\".\\n\\n \"\n else:\n lw[i-1]+=' '\n elif first_l.islower():\n if len(lw[i-1])>0 and lw[i-1].replace(' ','')!='':\n if lw[i-1].replace(' ','')[-1]==\"-\":\n ltemp_prev=lw[i-1].split(' ')\n ltemp_next=lw[i].split(' ')\n motprev=ltemp_prev[-1][:-1]\n motnext=lw[i].split(' ')[0]\n if len((motprev+' '+motnext).split())==2:\n if self.english_voc.check(motprev) and self.english_voc.check(motnext) and not self.english_voc.check(\"\".join([motprev,motnext])) :\n newmot=\" \".join([motprev,motnext])\n else:\n newmot=\"\".join([motprev,motnext])\n ltemp_prev[-1]=newmot\n ltemp_next[0]=\"\"\n lw[i-1]=\" \".join(ltemp_prev)\n lw[i]=\" \".join(ltemp_next)\n\n\n\n else:\n lw[i-1]+=\" \"\n else:\n lw[i-1]+=\" \"\n \n except:\n print('Error occurs, the reader may not be suitable for your pdf files')\n \n text=\"\".join(lw)\n return text", "def get_contacts_from_txt(cls, filename):\n struc_contact_dict = {}\n contact_list = None\n current_key = 0\n\n struct_Es = []\n\n # regex pattern\n pattern = \"(Structure)\\s+(\\d+)\\s+(-\\d+\\.\\d+)\\s+(\\d+\\.\\d+)\"\n regex = re.compile(pattern)\n\n with open(filename) as f:\n for ind, line in enumerate(f):\n m = re.search(string=line, pattern=regex)\n\n # structure heading lines\n if m is not None:\n\n # save list to current key before we re-initialize the list\n # for the next key\n if contact_list is not None:\n struc_contact_dict[current_key] = contact_list\n\n contact_list = []\n\n # 
create new key\n current_key = \"_\".join(str(x) for x in list(m.groups()))\n\n # add energy to list\n struct_Es.append(float(m.groups()[2]))\n\n # contact indices lines\n if m is None:\n\n # transform line to list of integers that will go into expander fcn\n expander_input = [int(i) for i in line.split()]\n\n contacts = cls.expander(expander_input[0], expander_input[1], expander_input[2])\n\n # add contacts to list for this structure key\n contact_list = contact_list + contacts\n\n # checks for end of file\n if 'str' in line:\n break\n\n # add last entry\n struc_contact_dict[current_key] = contact_list\n\n return struc_contact_dict, struct_Es", "def analyse(self):\n logging.info(\"transferring text to CorpusCook...\")\n\n paragraphs = self.text.split('\\n\\n')\n print(\"mean length of splitted lines\", (mean([len(p) for p in paragraphs])))\n\n # If TIKA resolved '\\n'\n if (mean([len(p) for p in paragraphs])) > 80:\n paragraphs = [re.sub(r\"- *\\n\", '', p) for p in paragraphs]\n paragraphs = [p.replace('\\n', \" \") for p in paragraphs]\n paragraphs = [p.replace(';', \" \") for p in paragraphs]\n joiner = \" \"\n else:\n # If TIKA did not\n joiner = \" \"\n\n processed_text = joiner.join([p\n for p in paragraphs\n if\n p and\n ks_2samp(self.normal_data, list(p)).pvalue > self.threshold\n ]\n )\n\n return processed_text.strip()[:self.length_limit]", "def findfield(parsed_field,textbounds):\n x=len(parsed_field)\n for i in range(len(textbounds)-x):\n h=0\n for j in range(len(parsed_field)): \n if textbounds[i+j].text.strip()==parsed_field[j].strip(): #similarity condition\n h=h+1\n if h==len(parsed_field):\n return [{'x':textbounds[i].x_start,'y':textbounds[i].y_start},{'x':textbounds[i+h-1].x_end,'y':textbounds[i+h-1].y_end}]\n return []", "def preprocessingSMS(textLine):\n\treturn textLine.split(None, 1)[1]", "def process(raw):\n #global weekNum\n field = None\n entry = {}\n cooked = []\n number = -1\n\n for line in raw:\n log.debug(\"Line: {}\".format(line))\n line = line.strip()\n if len(line) == 0 or line[0] == \"#\":#if # is the first character, skip\n log.debug(\"Skipping\")\n continue\n parts = line.split(':')#split lines to before and after \":\"\n if len(parts) == 1 and field:#adds additional content to whatever the previously used field is\n entry[field] = entry[field] + line + \" \" \n continue\n if len(parts) == 2:#if there are 2 parts, the field is the first part and the content is the second part\n field = parts[0]\n content = parts[1]\n else:#if none of the above are correct there is an issue\n raise ValueError(\"Trouble with line: '{}'\\n\".format(line) +\n \"Split into |{}|\".format(\"|\".join(parts)))\n\n if field == \"begin\":#checking if this is the line with the start date\n try:#begin only triggers once (at least it should only trigger once)\n base = arrow.get(content, \"MM/DD/YYYY\")#get the date as an object named \"base\", will need to use this to determine start date and current week, arrow must have a \"current date\"?\n # base is the \"week 1\" date, DD = 1, DD + 7 = 2, DD + 14 = 3, DD + 21 = 4, etc\n #now i will make variables for the start date of each week, or find a way to take the difference between 2 dates\n #end = base#arrow.get(base, \"MM/DD/YYYY\")\n #end = end.shift(weeks=+10)\n #today = arrow.now()\n #today.format(\"MM/DD/YYYY\")\n #if today == base:\n # weekNum = 1\n #number = -1\n \"\"\"weeks = [base, base.shift(days=+7), base.shift(days=+14), base.shift(days=+21), base.shift(days=+28), base.shift(days=+35), base.shift(days=+42), 
base.shift(days=+49), base.shift(days=+56), base.shift(days=+63), base.shift(days=+70)]\n today = arrow.now()\n for i in range(0,9):\n if weeks[i] <= today <= weeks[i+1]:\n number = i+1\n if today > weeks[10]:\n number = 10\n elif today < weeks[0]:\n number = 0\n #base = arrow.format(\"MM/DD/YYYY\")\n else:\n raise ValueError(\"Big error calculating week\")\n #for index in range(1,70):\n # base = base.shift(days=+1)\n # if today == base:\n # weekNum = weekNum + (index % 7)\n # break \n base = base.format(\"MM/DD/YYYY\")\"\"\"\n except:\n raise ValueError(\"Unable to parse date {}\".format(content))#date is incorrectly formatted, should be MM/DD/YYYY\n #now I need to check if either of these weeks is the current week\n# for r in arrow.Arrow.span_range('day',\n elif field == \"week\":#this is the week number\n if entry:\n cooked.append(entry)\n entry = {}#make entry empty again\n #if content == currentWeekNum:\n #print(\"Content: \" + content)\n #print(\"Week Number: \" + currentWeekNum + \"\\n\")\n #print(\"Is Current Week?\" + currentWeekBool + \"\\n\")\n # currentWeekBool = True\n entry['topic'] = \"\"#these are all \"classes\" in the HTML document\n entry['project'] = \"\"\n entry['week'] = content#put the week number into the \"week\" field in the html document\n #entry['isCurrentWeek'] = currentWeekBool\n #currentWeekBool = False\n #if content == weekNum:\n # entry['bool'] = True\n #else:\n # entry['bool'] = True\n \"\"\"if \n if content == currentWeekNum:\n entry['isCurrentWeek'] = True\n else:\n entry['isCurrentWeek'] = False\"\"\"\n\n elif field == 'topic' or field == 'project':#from if len == 2, set the entry for the field to the content in the html doc\n entry[field] = content\n\n else:\n raise ValueError(\"Syntax error in line: {}\".format(line))\n #entryn = entry + \"\\n\"\n\t#cookedn = cooked + \"\\n\"\n\t#fieldn = field + \"\\n\"\n\t#print(\"Entry: \" + entryn)\n #print(\"Cooked: \" + cookedn)\n #print(\"Field: \" + fieldn)\n if entry:#appends whatever added stuff to the whole document\n cooked.append(entry)\n\t#returns formatted document after it has been looped through\n #number = getWeekNum(raw)\n weeks = [base, base.shift(days=+7), base.shift(days=+14), base.shift(days=+21), base.shift(days=+28), base.shift(days=+35), base.shift(days=+42), base.shift(days=+49), base.shift(days=+56), base.shift(days=+63), base.shift(days=+70)]\n today = arrow.now()\n for i in range(0,9):\n if weeks[i] <= today <= weeks[i+1]:\n number = i+1\n return [cooked, i+1]\n if today < weeks[0]:\n number = 0\n else:\n number = 10\n return [cooked, number]", "def mo_parse_p(self, filepath):\n\n # Now, can reprocess using tesseract-ocr rather than pdftotext\n ptext = textract.process(filepath, method='tesseract', encoding='utf-8')\n ptext = ptext.replace(b'\\xe2\\x80\\x94', b'-')\n ptext = ptext.decode('utf-8')\n keys = list(self.mo_coefficient_name_map.keys())\n\n # Get the calibration date:\n for line in ptext.splitlines():\n if 'CALIBRATION DATE' in line:\n items = line.split()\n ind = items.index('DATE:')\n cal_date = items[ind+1]\n cal_date = pd.to_datetime(cal_date).strftime('%Y%m%d')\n self.date.update({len(self.date): cal_date})\n\n if 'psia S/N' in line:\n items = line.split()\n ind = items.index('psia')\n prange = items[ind-1]\n name = self.mo_coefficient_name_map.get('prange')\n self.coefficients.update({name: prange})\n\n # Loop through each line looking for the lines which contain\n # calibration coefficients\n if '=' in line:\n # Tesseract-ocr misreads '0' as O, and 1 as IL\n line = 
line.replace('O', '0').replace('IL', '1').replace(\n '=', '').replace(',.', '.').replace(',', '.')\n line = line.replace('L', '1').replace('@', '0').replace('l', '1').replace('--', '-')\n if '11' in line and 'PA2' not in line:\n line = line.replace('11', '1')\n items = line.split()\n for n, k in enumerate(items):\n if k.lower() in keys:\n try:\n float(items[n+1])\n name = self.mo_coefficient_name_map.get(k.lower())\n self.coefficients.update({name: items[n+1]})\n except:\n pass\n if 'CC_ptcb2' not in list(self.mo_coefficient_name_map.keys()):\n self.coefficients.update({'CC_ptcb2': '0.000000e+000'})", "def parse_results_file(filename):\n\tfile = open(filename, 'r')\n\tpretext=[line for line in file.readlines() if line.strip()]\n\tfile.close()\n\n\ttext = []\n\tprocessed = []\n\tlanguages = 'NONE'\n\tID = 'NONE'\n\t\n\tmoreheader = raw_input('Extra header labels from question field (e.g.: item,condition,factor1,factor2): ')\n\tstim_type = raw_input('What type are your stims? (i.e. AcceptabilityJudgment): ')\n\toutput_loc = raw_input('Where would you like to put your parsed file? (enter filename path): ')\n\t\n\t#takes out comments\n\tfor line in pretext:\n\t\tif re.match('#', line):\n\t\t\tcontinue\n\t\telse:\n\t\t\ttext.append(line)\n\n\tfirst = 1;\n\n\tfor line in range(len(text)):\n\t\t#get their info\n\t\tif re.search('Form', text[line]):\n\t\t\tif re.search('number', text[line]):\n\t\t\t\tID = re.split('number,', text[line])[1].strip()\n\t\t\telif re.search('age', text[line]):\n\t\t\t\tlanguages = re.split('age,', text[line])[1].strip()\n\n\t\t#looks for the main stimulus type, as entered earlier\t\t\n\t\tif re.search(stim_type, text[line]):\n\t\t\tif first:\n\t\t\t\t#print 'first'\n\t\t\t\tprocessed.append(str(ID+ ','+languages+','+text[line]))\n\t\t\t\tfirst=0\n\t\t\telse:\n\t\t\t\ttoAmend = processed.pop()\n\t\t\t\t#print str('toAmend: ' + toAmend)\n\t\t\t\ttoAdd=''\n\t\t\t\tsplits = re.split('NULL,', text[line])\n\t\t\t\tfor thing in splits[1:]:\n\t\t\t\t\tif thing is not '':\n\t\t\t\t\t\ttoAdd = str(toAdd + ',' + thing.strip(','))\n\t\t\t\t#print str('toAdd: ' + toAdd)\n\t\t\t\tprocessed.append(str(toAmend.strip()+ toAdd))\n\t\t\t\tfirst = 1\n\n\t\t#if the line is a question line, there's more to append\n\t\tif re.search('Question', text[line]):\n\t\t\ttoAmend = processed.pop()\n\t\t\tpart = re.split('\\$', text[line])[1]\n\t\t\tpart.strip('$')\n\t\t\tparts = part.split('%2C')\n\t\t\tprocessed.append(str(toAmend.strip()+ ','+ string.join(parts, ',')+'\\n'))\n\t\t\t\n\toutput = open(output_loc, 'w')\n\n\theader = 'ID,Languages,Time sent,MD5 Hash of IP Address,Controller,Item Number,Element Number,Type,Group,Stimulus,Answer,RT,'\n\n\toutput.write(str(header+moreheader+'\\n'))\n\n\t#put it all into a text file\n\tfor line in processed:\n\t\toutput.write(line)\n\toutput.close()", "def process(bio):\n l3 = Level3File(bio)\n del bio\n ctx = {}\n ctx[\"nexrad\"] = l3.siteID\n ctx[\"ts\"] = l3.metadata[\"vol_time\"].replace(tzinfo=pytz.UTC)\n ctx[\"lines\"] = []\n if not hasattr(l3, \"graph_pages\"):\n LOG.info(\"%s %s has no graph_pages\", ctx[\"nexrad\"], ctx[\"ts\"])\n return ctx\n for page in l3.graph_pages:\n for line in page:\n if \"text\" in line:\n ctx[\"lines\"].append(line[\"text\"])\n df = PGCONN.runInteraction(really_process, ctx)\n df.addErrback(common.email_error, ctx)\n return ctx", "def parse_text(self):\n self.text={}\n for i, lang in enumerate(LANGS):\n text=file(self.src).read()\n self.text[lang]=\"\"\n extracted, finish = \"\", 0\n start_string, stop_string = 
r\"<!--%s-->\" % lang, r\"<!--/%s-->\" % lang\n # Iterates to check multiple blocks of text within the file!\n # Pay attention to infinite loops!\n # AttributeError exception raised when no more blocks to extract exist\n while True:\n try:\n start=re.compile(start_string, re.IGNORECASE).search(text).span()[1]\n finish=re.compile(stop_string, re.IGNORECASE).search(text).span()[0]\n extracted+=text[start:finish]\n text=text[finish+1:]\n except AttributeError:\n break\n self.text[lang]+=extracted", "def split_report(self, lines):\n start_line = 0\n end_line = 0\n fio_reports = []\n for index in range(len(lines)):\n if self._is_start(lines[index]):\n start_line = index\n end_line = 0\n logging.debug(\"Found start line %s\" % lines[index])\n logging.debug(\"Start line index is %s\" % start_line)\n\n if self._is_end(lines[index]):\n end_line = index\n logging.debug(\"Found end line %s\" % lines[index])\n logging.debug(\"End line index is %s\" % end_line)\n parse_lines = lines[start_line:end_line]\n logging.debug(\"Parsing text: %s\" % parse_lines)\n fio_data = self.parse_report(\"\\n\".join(parse_lines))\n fio_reports.append(fio_data)\n\n return fio_reports", "def process_messages(imap, messages):\n for i in messages:\n # fetch the email message by ID\n res, msg = imap.fetch(str(i), \"(RFC822)\")\n for response in msg:\n if isinstance(response, tuple):\n # parse bytes email into a message object\n msg = email.message_from_bytes(response[1])\n #print(msg.keys())\n\n # decode the email subject\n subject = decode_header(msg[\"Subject\"])[0][0]\n if isinstance(subject, bytes):\n # if it's a bytes, decode to str\n subject = subject.decode()\n\n # decode email sender\n From, encoding = decode_header(msg.get(\"From\"))[0]\n if isinstance(From, bytes):\n From = From.decode(encoding)\n\n # decode email Date\n Date, encoding = decode_header(msg.get(\"Date\"))[0]\n if isinstance(From, bytes):\n Date = Date.decode(encoding)\n\n print(\"Subject: \", subject)\n print(\"From: \", From)\n print(\"Date: \", Date)\n\n print(\"=\"*100)", "def pre_process(self, raw_text):\n # remove the space or other symbols\n word_lists = re.split(r'\\s+', raw_text.strip())\n if len(word_lists) < 2:\n print(word_lists)\n # exit(1)\n sent_index = word_lists[0]\n word_lists = ''.join(word_lists[1:])\n # word_lists = re.split(r'。', word_lists)\n # sent_content = ''.join(word_lists)\n return sent_index, word_lists", "def _read_header_line_1(self, lines: list) -> dict:\n fields = (\n \"model_id\",\n \"unit_id\",\n \"software_level\",\n \"message_number\",\n \"message_subclass\",\n )\n if self._is_ct25k():\n indices = [1, 3, 4, 6, 7, 8]\n else:\n indices = [1, 3, 4, 7, 8, 9]\n values = [split_string(line, indices) for line in lines]\n return values_to_dict(fields, values)", "def _parse_xml(self):\n self.properties = {}\n pages = self.root.findall('page')\n self.pages = {} \n\n for page_num, page in enumerate(pages): \n\n _, _ , width, height = page.attrib[\"bbox\"].split(\",\")\n width, height = float(width), float(height)\n \n page_object = {\"page\": page_num + 1 , \"width\": width, \"height\": height} \n lines = self.root.findall('page[@id=\\'{}\\']/textbox/textline'.format(page_num+1)) \n print(\"{} Number of Lines in Page {}\".format(len(lines), page_num))\n \n self.bbox = {'x1': [] , 'y1':[], 'x2':[], 'y2':[]}\n textlines = self.root.findall('page[@id=\\'{}\\']/textbox/textline'.format(page_num+1)) \n textlines = sorted(textlines, key= lambda x: -float(x.attrib['bbox'].split(',')[3]))\n \n \n line_objects = []\n for idx, item in 
enumerate(textlines):\n item_props = self._extract_textline_properties(item)\n bbox = item.attrib['bbox'].split(',')\n item_props[\"x0\"] = Decimal(bbox[0])\n item_props[\"x1\"] = Decimal(bbox[2])\n item_props[\"y0\"] = Decimal(bbox[1])\n item_props[\"y1\"] = Decimal(bbox[3])\n item_props[\"top\"] = Decimal(height - float(bbox[3]))\n item_props[\"bottom\"] = Decimal(height - float(bbox[1]))\n\n line_objects.append(item_props)\n page_object[\"lines\"] = line_objects\n \n \n others = [] \n# for key in [\"rect\", \"figure\", \"layout/textgroup\", \"curve\"]: \n for key in [\"curve\", \"rect\", \"figure\"]: \n other_objs = self.root.findall('page[@id=\\'{}\\']/{}'.format(page_num+1, key)) \n for idx, item in enumerate(other_objs):\n \n item_props = {\"type\": key}\n# print(key, ET.tostring(item))\n bbox = item.attrib['bbox'].split(',')\n item_props[\"x0\"] = Decimal(bbox[0])\n item_props[\"x1\"] = Decimal(bbox[2])\n item_props[\"y0\"] = Decimal(bbox[1])\n item_props[\"y1\"] = Decimal(bbox[3]) \n item_props[\"top\"] = Decimal(height - float(bbox[3]))\n item_props[\"bottom\"] = Decimal(height - float(bbox[1]))\n others.append(item_props)\n \n page_object[\"others\"] = others\n page = Page(page_object)\n page_object[\"para\"] = page.para\n page_object[\"plines\"] = page.lines\n page_object[\"bigbox\"] = page.bigbox\n page_object[\"components\"] = page.components\n\n self.pages[page_num+1] = page_object", "def process_lines(in_lines):\n\n out_lines = list()\n\n line_num = 0\n for orig_line in in_lines:\n orig_line = orig_line.rstrip()\n line_num += 1\n\n line = orig_line.strip()\n line = re.sub('\\s*#.*', '', line)\n\n # if there is nothing left after stripping out the comments\n # and whitespace, then just move on to the next line\n #\n if not line:\n continue\n\n components = line.split()\n hostname = components[0]\n if len(components) > 1:\n netmask = components[1]\n elif re.search('/', hostname):\n # Look for a maskwidth instead of a netmask.\n # If we find one, convert it to a netmask.\n try:\n (hostname, maskwidth_str) = hostname.split('/')\n\n maskwidth = int(maskwidth_str)\n assert (maskwidth >= 0) and (maskwidth <= 32)\n\n netmask = VALID_NETMASKS[maskwidth]\n except BaseException:\n print 'Error on line %d [%s]' % (line_num, orig_line)\n raise ValueError('Invalid host or netmask specification')\n else:\n netmask = '255.255.255.255'\n\n if not netmask in VALID_NETMASKS:\n print 'Error on line %d [%s]' % (line_num, orig_line)\n raise ValueError('Invalid host or netmask specification')\n\n addrs = []\n try:\n addrs = socket.getaddrinfo(hostname, None, socket.AF_INET,\n socket.SOCK_STREAM, socket.SOL_TCP)\n\n # Can getaddrinfo succeed but return no addresses?\n # Better safe than sorry.\n #\n if len(addrs) == 0:\n raise ValueError('no addr found for [%s]' % hostname)\n\n except BaseException:\n print 'Error on line %d [%s]' % (line_num, orig_line)\n raise ValueError('Unknown hostname [%s]' % hostname)\n\n for addr in addrs:\n ipnum = addr[4][0] # depends on the getaddrinfo return format\n\n try:\n addr_obj = ipaddr.IPv4Network(\n '%s/%d' % (ipnum, VALID_NETMASKS.index(netmask)))\n out_lines.append('%-15s %s\\n' %\n (str(addr_obj.ip), str(addr_obj.netmask)))\n\n except BaseException:\n print 'Error on line %d [%s]' % (line_num, orig_line)\n raise ValueError('Bad address or netmask')\n\n return out_lines", "def process_paragraph( paragraph ):\n\t# Lists of bounding boxes, text, and probabilities\n\tline_box_list = []\n\tline_text_list = []\n\tline_prob_list = []\n\n\t# Line under 
processing\n\tcurrent_line_text = []\n\tcurrent_line_prob = []\n\t# Bounding box temporary variables\n\tx1 = 100000\n\ty1 = 100000\n\tx2 = 0\n\ty2 = 0\n\n\tfor word in paragraph.words:\n\t\tfor symbol in word.symbols:\n\t\t\t# x1, y1 (Left upper corner)\n\t\t\tif symbol.bounding_box.vertices[0].x < x1:\n\t\t\t\tx1 = symbol.bounding_box.vertices[0].x\n\t\t\tif symbol.bounding_box.vertices[0].y < y1:\n\t\t\t\ty1 = symbol.bounding_box.vertices[0].y\n\t\t\tif symbol.bounding_box.vertices[1].y < y1: \n\t\t\t\ty1 = symbol.bounding_box.vertices[1].y\n\t\t\tif symbol.bounding_box.vertices[3].x < x1:\n\t\t\t\tx1 = symbol.bounding_box.vertices[3].x\n\t\t\t# x2, y2 (right lower corner)\n\t\t\tif symbol.bounding_box.vertices[2].x > x2:\n\t\t\t\tx2 = symbol.bounding_box.vertices[2].x\n\t\t\tif symbol.bounding_box.vertices[2].y > y2:\n\t\t\t\ty2 = symbol.bounding_box.vertices[2].y\n\t\t\tif symbol.bounding_box.vertices[1].x > x2:\n\t\t\t\tx2 = symbol.bounding_box.vertices[1].x\n\t\t\tif symbol.bounding_box.vertices[3].y > y2:\n\t\t\t\ty2 = symbol.bounding_box.vertices[3].y\n\n\t\t\tcurrent_line_text.append( symbol.text )\n\t\t\tcurrent_line_prob.append( symbol.confidence )\n\t\t\t# Check for blank spaces\n\t\t\tif symbol.property.detected_break.type in [ breaks.SPACE, breaks.SURE_SPACE ]:\n\t\t\t\tcurrent_line_text.append( ' ' )\n\t\t\t\tcurrent_line_prob.append( 0.95 )\n\t\t\t# Check for new lines\n\t\t\tif symbol.property.detected_break.type in [ breaks.EOL_SURE_SPACE, breaks.HYPHEN, breaks.LINE_BREAK ]:\n\t\t\t\tline_box_list.append( [x1, y1, x2, y2] )\n\t\t\t\tline_text_list.append( current_line_text )\n\t\t\t\tline_prob_list.append( current_line_prob )\n\t\t\t\t# Line under processing\n\t\t\t\tcurrent_line_text = []\n\t\t\t\tcurrent_line_prob = []\n\t\t\t\t# Bounding box temporary variables\n\t\t\t\tx1 = 100000\n\t\t\t\ty1 = 100000\n\t\t\t\tx2 = 0\n\t\t\t\ty2 = 0\n\n\treturn( line_box_list, line_text_list, line_prob_list )", "def processText(text):\n print(type(text))\n for line in text:\n print(line)\n return text", "def process_message(self, mailfrom, rcpttos, data):\n\n\t\t#Process message received\n\t\tprint mailfrom, rcpttos, data", "def processFaxbotMessage(self, txt):\r\n with self.__lock:\r\n if \"I do not understand your request\" in txt:\r\n replyTxt = (\"FaxBot does not have the requested monster '{}'. \"\r\n \"(Check the list at {} )\"\r\n .format(self._lastRequest, self.fax_list_url)) \r\n self._lastRequest = None\r\n self._lastRequestTime = None\r\n return replyTxt\r\n if \"just delivered a fax\" in txt:\r\n self._lastRequest = None\r\n self._lastRequestTime = None\r\n return (\"FaxBot received the request too early. \"\r\n \"Please try again.\")\r\n if \"try again tomorrow\" in txt:\r\n self._noMoreFaxesTime = utcTime()\r\n txt = (\"I'm not allowed to request any more faxes today. 
\"\r\n \"Request manually with /w FaxBot {}\"\r\n .format(self._lastRequest))\r\n self._lastRequest = None\r\n self._lastRequestTime = utcTime()\r\n return txt\r\n m = re.search(r'has copied', txt)\r\n if m is not None:\r\n self._lastRequest = None\r\n self._lastRequestTime = None\r\n self._lastFaxBotTime = utcTime()\r\n # suppress output from checkForNewFax since we are returning\r\n # the text, to be output later\r\n return self.checkForNewFax(False)\r\n self._lastRequest = None\r\n self._lastRequestTime = None\r\n return \"Received message from FaxBot: {}\".format(txt)", "def parse_records(self):\n for record in sp.parse(gzip.open(\n \"./human_uniprot_04_07_20.gz\", 'rt')):\n # print(record.taxonomy_id)\n # if record.organism != \"Homo sapiens\":\n # continue\n # print(record.features[0])\n # for comment in record.comments:\n # if comment.startswith(\"SUBCELLULAR LOCATION\"):\n # print(comment)\n self.extract_features_to_dict(record)\n self.extract_localization(record)", "def __init__(self, line):\n (self.timestamp, self.status_code, self.content_length, self.url, self.hop_path, self.via,\n self.mime, self.thread, self.start_time_plus_duration, self.hash, self.source,\n self.annotation_string) = re.split(\" +\", line.strip(), maxsplit=11)\n # Account for any JSON 'extra info' ending, strip or split:\n if self.annotation_string.endswith(' {}'):\n self.annotation_string = self.annotation_string[:-3]\n elif ' {\"' in self.annotation_string and self.annotation_string.endswith('}'):\n self.annotation_string, self.extra_json = re.split(re.escape(' {\"'), self.annotation_string, maxsplit=1)\n self.extra_json = '{\"%s' % self.extra_json\n # And split out the annotations:\n self.annotations = self.annotation_string.split(',')\n\n # Some regexes:\n self.re_ip = re.compile('^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$')\n self.re_tries = re.compile('^\\d+t$')\n self.re_dol = re.compile('^dol:\\d+') # Discarded out-links - make a total?", "def process_entry(self):\n\n for line_item in self.entry:\n pairs = line_item.split(' ')\n for pair in pairs:\n if ':' in pair:\n key, value = pair.split(':')\n if value.isdigit():\n self.fields[key] = int(value)\n else:\n self.fields[key] = value", "def process(raw):\n entry = { }\n cooked = [ ]\n\n for line in raw:\n line = line.strip()\n if len(line) == 0 or line[0]==\"#\" :\n continue\n parts = line.split(';')\n if len(parts) == 3:\n entry[\"description\"] = parts[0].strip() #adding key and values to the dict\n entry[\"long\"] = parts[1].strip()\n entry[\"lat\"] = parts[2].strip()\n cooked.append(entry) #add this dict entry into the array\n entry = { }\n continue\n else:\n raise ValueError(\"Trouble with line: '{}'\\n\".format(line))\n \n return cooked #returning an array of dicts", "def _read_entry_detail(self, line):\n account_number = 0\n try:\n account_number = int(line[12:29].strip().replace('-', ''))\n except ValueError as err:\n print('Error parsing account number field -> ' + str(err))\n\n result_dict = {'Transaction Code': line[1:3],\n 'ReceivingID': line[3:11],\n 'CheckDigit': line[11],\n 'Account Number': account_number,\n 'Amount': int(line[29:39]) / 100,\n 'Individual ID': line[39:54],\n 'Receiver Name': line[54:76].strip(),\n 'DiscretionaryData': line[76:78],\n 'AddendaIndicator': line[78],\n 'TraceNumber': line[79:94]}\n\n self.entries.append(result_dict)", "def parse(self):\n\n lines = self._get_file_lines( )\n\n message, translation = None, None\n comment, status, sources = None, None, None\n temp_msgid, temp_msgstr = None, None\n previous, 
current = None, None\n\n tstore = UT3Store( )\n\n for curl in lines:\n\n curl = curl.strip( )\n\n if len(curl) == 0:\n current = LINE_EMPTY\n elif curl[0] == '#':\n current = LINE_COMMENT\n status, comment, sources = \\\n _extract_comment_values(curl, status, comment, sources)\n else:\n keyword, message = _parse_line(curl)\n if keyword is not None:\n if keyword == \"msgid\":\n current = LINE_MSGID\n # and now initialise them for later use\n temp_msgid = message\n temp_msgstr = \"\"\n elif keyword == \"msgstr\":\n current = LINE_MSGSTR\n temp_msgstr = message\n else:\n current = LINE_UNKNOWN\n logging.error(\"unknown keyword: %s\" % (keyword))\n else:\n if message is not None:\n if current == LINE_MSGID:\n temp_msgid = temp_msgid + message\n elif current == LINE_MSGSTR:\n temp_msgstr = temp_msgstr + message\n else:\n logging.error(\"unknown mode\")\n\n if previous == LINE_MSGSTR and current != LINE_MSGSTR:\n # we're not in msgstr mode anymore --> save the current entry\n entry = _make_item(message, translation, \\\n sources, comment, status)\n if entry is not None:\n tstore.append(entry)\n\n # reset the item values\n message, translation = None, None\n comment, status, sources = None, None, None\n\n # save msgid and msgstr for storing them later\n message = temp_msgid\n translation = temp_msgstr\n # save line state\n previous = current\n\n # finally append the last pair\n if previous == LINE_MSGSTR:\n entry = _make_item(message, translation, sources, comment, status)\n if entry is not None:\n tstore.append(entry)\n\n return tstore", "def domain_parser():\n\tinput_handle=\"blast_hits.fasta\"\n\t# File with: (1) the domains of each hit and (2) domains information\n\tdomains= \"domains_hits.txt\"\n\toutput_handle = open(domains, \"w\")\n\toutput_handle.write(\"#This file contains the domains of each hit.\\n#At the bottom, you will find detail information of all the domains detected.\\n\")\n\toutput_handle.write(\"#We strongly recommend to open this file with Visual Studio Code.\\n#Because when the names of the domains are too large, in regular editors the table looks awful.\\n\")\n\toutput_handle.write(\"#Here it is only showed how many times a pattern is present.\\n#In the figure of the domains you will find the position of each domain.\\n\\n\")\n\taccession_list=[] \t# List of prosite.doc accessions of the domains that had been found\n\tdomains_dict=dict() # dictionary that saves matches\n\tcount=1\n\tmax_seq_len=0 # Keep larger sequence to plot x-axe\n\t# Loop to go through hits\n\tfor seq_record in SeqIO.parse(input_handle, \"fasta\"):\n\t\toutput_handle.write(str(seq_record.id)+\"\\n\") # print identifier of the hit\n\t\toutput_handle.write(str(seq_record.seq)+\"\\n\") # print sequence of the hit\n\t\tif len(seq_record.seq)>max_seq_len:\n\t\t\tmax_seq_len=len(seq_record.seq)\n\t\t# Make a table for each hit with the domains, that contains the following fields: name, accession, description and pattern\n\t\tx=PrettyTable()\n\t\tx.field_names=[\"name\",\"accession\",\"description\",\"pattern\",\"repetitions\"]\n\n\t\t# Loop to go through prosite domains\n\t\thandle = open(\"prosite.dat\",\"r\")\n\t\trecords = Prosite.parse(handle)\n\t\tfor record in records:\n\t\t\t# prosite.dat preparation for parsing\n\t\t\t# {} -> [^]\n\t\t\tpattern = record.pattern.upper()\n\t\t\tpattern = pattern.replace(\"{\", \"[^\")\n\t\t\tpattern = pattern.replace(\"}\", \"]\")\t\n\t\t\t# - -> \"\"\n\t\t\tpattern = pattern.replace(\"-\", \"\")\t\n\t\t\t# . 
-> \"\"\n\t\t\tpattern = pattern.replace(\".\", \"\")\t\n\t\t\t# X|x -> \"[ARNDCQEGHILKMFPSTWYV]\"\n\t\t\tAAS=\"[ARNDCQEGHILKMFPSTWYV]\"\n\t\t\tpattern = pattern.replace(\"x\", AAS)\n\t\t\tpattern = pattern.replace(\"X\", AAS)\t\n\t\t\t# () -> {}\n\t\t\tpattern = pattern.replace(\"(\", \"{\")\n\t\t\tpattern = pattern.replace(\")\", \"}\")\t\n\n\t\t\t# >] -> ]?$\n\t\t\tpattern = pattern.replace(\">]\", \"]?$\")\t\n\n\t\t\t# < -> ^\n\t\t\t# > -> $\n\t\t\tpattern = pattern.replace(\"<\", \"^\")\t\n\t\t\tpattern = pattern.replace(\">\", \"$\")\t\n\t\t\tif pattern != \"\":\n\t\t\t\t# Look if the hit contains the current patter\n\t\t\t\tif re.search(r\"\"+str(pattern), str(seq_record.seq).upper()): # if found\n\t\t\t\t\tif record.pdoc not in accession_list:\n\t\t\t\t\t\t# Save pdoc accession in the list of prosite.doc accessions\n\t\t\t\t\t\t# if it is not already\n\t\t\t\t\t\taccession_list.append(record.pdoc)\n\t\t\t\t\tmatches = re.finditer(r\"\"+str(pattern), str(seq_record.seq).upper())\n\t\t\t\t\treps=0\n\t\t\t\t\tfor match in matches: # save all matches in a dictionary to plot them later\n\t\t\t\t\t\tdomains_dict[count]=[seq_record.id, len(seq_record.seq),record.name,match.start(),match.end()]\n\t\t\t\t\t\tcount=count+1\n\t\t\t\t\t\treps=reps+1\n\t\t\t\t\tx.add_row([record.name,record.accession,record.description,record.pattern, reps]) # add found domain to table\n\n\t\toutput_handle.write(str(x)+\"\\n\") # add table of hit to domains_hits.txt\n\n\t# At the end of the tables, print information of all the domains that had been found\n\toutput_handle.write(\"\\n\")\n\trecord_text_list=DocParser(accession_list)\n\tfor text in record_text_list:\n\t\toutput_handle.write(text)\n\treturn (domains_dict,max_seq_len,accession_list)", "def process_to_text(rawfile, txtfile, field: int=None):\n\n if not os.path.exists(txtfile) or os.path.getsize(txtfile) == 0:\n sacrelogger.info(\"Processing %s to %s\", rawfile, txtfile)\n if rawfile.endswith('.sgm') or rawfile.endswith('.sgml'):\n with smart_open(rawfile) as fin, smart_open(txtfile, 'wt') as fout:\n for line in fin:\n if line.startswith('<seg '):\n print(_clean(re.sub(r'<seg.*?>(.*)</seg>.*?', '\\\\1', line)), file=fout)\n elif rawfile.endswith('.xml'): # IWSLT\n with smart_open(rawfile) as fin, smart_open(txtfile, 'wt') as fout:\n for line in fin:\n if line.startswith('<seg '):\n print(_clean(re.sub(r'<seg.*?>(.*)</seg>.*?', '\\\\1', line)), file=fout)\n elif rawfile.endswith('.txt'): # wmt17/ms\n with smart_open(rawfile) as fin, smart_open(txtfile, 'wt') as fout:\n for line in fin:\n print(line.rstrip(), file=fout)\n elif rawfile.endswith('.tsv'): # MTNT\n with smart_open(rawfile) as fin, smart_open(txtfile, 'wt') as fout:\n for line in fin:\n print(line.rstrip().split('\\t')[field], file=fout)", "def parse_reports(self):\n txt = (\n self.unixtext\n if self.unixtext[:2] != \"\\001\\n\"\n else self.unixtext[2:]\n )\n\n lines = txt.split(\"\\n\")\n # There may be an AWIPSID in line 3 or silly aviation control char\n pos = 3 if len(lines[2]) < 10 or lines[2].startswith(\"\\x1e\") else 2\n meat = \"\".join(lines[pos:])\n for report in meat.split(\"=\"):\n if report.strip() == \"\":\n continue\n res = self.process_pirep(\" \".join(report.strip().split()))\n if res is not None:\n self.reports.append(res)", "def processLayoutText(self, layoutText):## This function is being called from the __init__ function of the Layout class.\n maxY = self.height - 1 # So that maxY was reduced by -1. This is critical as the loop below reduces maxY by y. 
The reduction is necessary as self.height was determined by len(layoutText) which counts all the elements but the for loop for the 'y' variable' uses range(self.height) which counts from '0'. maxY means that we are countin through the 2-dimensional array columns from the back to the front.\n for y in range(self.height): ## Why are we counting through the text elements in reverse? Because the __str__function of the Grid class returned the out variable which contains the GRID in reverse.\n for x in range(self.width): ### PLEASE NOTE! The need for reversing this is that we WANT TO LOOK AT THIS IN A COORDINATE FORMAT WITH (0,0) representing the bottom left corner.\n layoutChar = layoutText[maxY - y][x] #Passes the layout character ('%' '.' or 'o' --> see above or layout file) to the layoutChar variable. This is done in a 'flipped mannor from the input format to the (x,y) convention.\n ## Based on the 2D array the (visualized in file Grid_text) the layoutChar variable assumes the following values:\n self.processLayoutChar(x, y, layoutChar) # layoutChar is based on the variable layout Text: [%.%OG%,.%OG%.%,%%%%%%,%....%] with each position to be submited one-by-one based on the nested for loops. This maps the 2-dimentional Grid created in the __init__function and changes the boolean values to suit the layout of the board. See example in 'processLayoutChar' function below.\n self.agentPositions.sort()\n #print(self.agentPositions)\n self.agentPositions = [ ( i == 0, pos) for i, pos in self.agentPositions] #This basically creates a list of tuples containing the coordinates of the agents.\n #print(self.agentPositions)\n #print(self.capsules)\n #print(self.numGhosts)", "def process_text(self, text, language):", "def parse_display_lines(self):\n is_on = None\n source_name = None\n volume = None\n mute_on = None\n party_mode_on = None\n info = None\n rec_source = None\n zone2_source = None\n zone2_volume = None\n zone3_source = None\n zone3_volume = None\n zone4_source = None\n zone4_volume = None\n\n line0 = self.lines[0]\n if len(line0) != 21:\n _LOGGER.error(\"Display line 1 must be exactly 21 bytes\")\n if (\n line0\n == \"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"\n ):\n is_on = False\n else:\n is_on = True\n source_name = line0[:8].rstrip()\n party_mode_on = line0[10:13] == \"pty\"\n vol_str = line0[14:]\n if (vol_str == \"MUTE ON\") or (vol_str == \" \"):\n mute_on = True\n volume = None\n elif vol_str[0:3] != \"VOL\":\n _LOGGER.error(\"Could not verify VOL string: %s\", vol_str)\n else:\n mute_on = False\n volume = int(vol_str[3:])\n\n line1 = self.lines[1]\n if len(line1) != 21:\n _LOGGER.error(\"Display line 2 must be exactly 21 bytes\")\n if (\n line1\n == \"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"\n ):\n pass\n else:\n info = line1.strip().replace(\"\\x19\", \"II\")\n if line1[:9] == \" REC \":\n rec_source = line1[9:].rstrip()\n elif line1[:9] == \" ZONE2 \":\n zone2_source = line1[9:].rstrip()\n elif line1[:14] == \" ZONE2 VOL \":\n zone2_volume = int(line1[14:16])\n elif line1[:9] == \" ZONE3 \":\n zone3_source = line1[9:].rstrip()\n elif line1[:14] == \" ZONE3 VOL \":\n zone3_volume = int(line1[14:16])\n elif line1[:9] == \" ZONE4 \":\n zone4_source = line1[9:].rstrip()\n elif line1[:14] == \" ZONE4 VOL \":\n zone4_volume = int(line1[14:16])\n\n return {\n \"is_on\": is_on,\n \"source_name\": source_name,\n \"volume\": volume,\n \"mute_on\": mute_on,\n 
\"party_mode_on\": party_mode_on,\n \"info\": info,\n \"rec_source\": rec_source,\n \"zone2_source\": zone2_source,\n \"zone2_volume\": zone2_volume,\n \"zone3_source\": zone3_source,\n \"zone3_volume\": zone3_volume,\n \"zone4_source\": zone4_source,\n \"zone4_volume\": zone4_volume,\n }", "def parse(self, lines):\n # convert file to string deleting end of line charcters\n citations_string = self.prepare_file(lines)\n # extract the entries from the string\n entries = list(self.find_entries(citations_string))\n entries.append(len(citations_string))\n # parse each entry to generate a citation\n for idx, jdx in zip(entries[:-1], entries[1:]):\n self.parse_entry(citations_string[idx:jdx])\n return self.force_field.citations", "def build_mails(user_data):\n mails = []\n print(\"Building texts\")\n for data in user_data:\n missingdata = data['datosquefaltan'].strip()\n if missingdata and missingdata != \"-\":\n missing = MISSING.format(missing_items=missingdata)\n else:\n missing = \"\"\n\n payment_key = (data['tiposocio'], data['formadepago'])\n print(\" \", payment_key, repr(missingdata))\n pago = ALL_PAYMENTS[payment_key]\n\n data.update(missing=missing, pago=pago)\n text = MAIN_TEXT.format(**data)\n\n recipient = \"{} {} <{}>\".format(data['nombre'], data['apellido'], data['email'])\n mails.append((recipient, text))\n\n return mails", "def process_raw_text(self, file_name, column_side):\n self.mvc_check()\n\n model_txt = None\n if column_side == LEFT_TEXT:\n model_txt = self.model.txt1\n elif column_side == RIGHT_TEXT:\n model_txt = self.model.txt2\n\n model_txt.open_raw(file_name)\n model_txt.process_raw()\n self.opened_txt[column_side] = True\n self.can_align = self.opened_txt[LEFT_TEXT] and self.opened_txt[RIGHT_TEXT]\n\n # Goldsmith\n model_txt.make_trie(column_side)\n model_txt.apply_goldsmith(1.1, 20, column_side)\n\n # Associate word for alignment if both text were opened\n if self.can_align:\n for view in self.views:\n view.end_task()\n view.change_task(\"Associating words\")\n self.model.associate_words(1.5)\n for view in self.views:\n view.end_task()\n\n # TODO : coherent saving to database using model.save_data\n\n return model_txt.str", "def detect(self, numbered_lines):\n numbered_lines = list(numbered_lines)\n numbers = [n for n, _l in numbered_lines]\n start_line = min(numbers)\n end_line = max(numbers)\n # logger.debug('CopyrightDetector:detect:lines numbers: %(start_line)d->%(end_line)d' % locals())\n tokens = self.get_tokens(numbered_lines)\n\n # we accumulate detected items in these synchronized lists\n # this could be a single list of namedtuples\n # or a list of dicts instead\n copyrights, authors, years, holders = [], [], [], []\n\n if not tokens:\n return copyrights, authors, years, holders, None, None\n\n # OPTIMIZED\n copyrights_append = copyrights.append\n authors_append = authors.append\n years_append = years.append\n holders_append = holders.append\n\n # first, POS tag each token using token regexes\n tagged_text = self.tagger.tag(tokens)\n logger.debug('CopyrightDetector:tagged_text: ' + str(tagged_text))\n\n # then build a parse tree based on tagged tokens\n tree = self.chunker.parse(tagged_text)\n logger.debug('CopyrightDetector:parse tree: ' + str(tree))\n\n # OPTIMIZED\n nltk_tree_Tree = nltk.tree.Tree\n CopyrightDetector_as_str = CopyrightDetector.as_str\n\n def collect_year_and_holder(detected_copyright):\n \"\"\"\n Walk the a parse sub-tree starting with the `detected_copyright`\n node collecting all years and holders.\n \"\"\"\n for copyr in 
detected_copyright:\n if isinstance(copyr, nltk_tree_Tree):\n # logger.debug('n: ' + str(copyr))\n node_text = CopyrightDetector_as_str(copyr)\n copyr_label = copyr.label()\n if 'YR-RANGE' in copyr_label:\n years_append(refine_date(node_text))\n elif 'NAME' == copyr_label or 'COMPANY' in copyr_label:\n # FIXME : this would wreck things like 23andme\n # where a company name contains numbers\n holders_append(refine_author(node_text))\n # logger.debug('CopyrightDetector: node_text: ' + node_text)\n collect_year_and_holder(copyr)\n\n # then walk the parse tree, collecting copyrights, years and authors\n for tree_node in tree:\n if isinstance(tree_node, nltk_tree_Tree):\n node_text = CopyrightDetector_as_str(tree_node)\n tree_node_label = tree_node.label()\n if 'COPYRIGHT' in tree_node_label:\n if node_text and node_text.strip():\n refined = refine_copyright(node_text)\n if not is_junk(refined):\n copyrights_append(refined)\n collect_year_and_holder(tree_node)\n elif tree_node_label == 'AUTHOR':\n authors_append(refine_author(node_text))\n\n return copyrights, authors, years, holders, start_line, end_line", "def processClauseText(intext, mtype): # type: (str, str) -> []\n\n global classifications, accsearch, unaccsearch\n\n retlist = []\n texts = []\n if mtype == 'text' or mtype == 'txt':\n texts = extractClauses(intext)\n elif 'pdf' in mtype:\n plaintext = parsepdf(intext)\n texts = extractPDFClauses(plaintext)\n elif 'word' in mtype:\n plaintext = parseword(intext)\n texts = extractClauses(plaintext)\n\n results = predicteula(texts)\n if len(results['prediction']) == 0:\n return retlist\n\n accs = [[win[1] for win in curwins] for curwins in results['windows']]\n probs = [max(curwins) for curwins in accs]\n\n for text, prediction in zip(texts, probs):\n if len(text.split()) > 9:\n curclause = {}\n curclause['origclause'] = text\n curclause['classification'] = 'Not Sure'\n if prediction > 0.6:\n curclause['classification'] = 'Acceptable'\n elif prediction < 0.4:\n curclause['classification'] = 'Unacceptable'\n curclause['score'] = prediction\n curclause['accclause'] = dicesearch(text, accsearch)\n curclause['unaccclause'] = dicesearch(text, unaccsearch)\n retlist.append(curclause)\n\n return retlist", "def measureAll(authors_texts,sectorialized_agents):\n authors_texts=P.text.aux.textFromAuthors(authors_texts,self.topm_dict[\"sectorialized_agents\"])\n authors_measures={}\n # análise de cada mensagem e de cada autor\n for author in authors_texts:\n authors_measures[author]={}\n texts=authors_texts[author]\n authors_measures[author][\"raw_strings\"]=P.text.raw.analyseAll(texts)\n authors_measures[author][\"pos\"]= P.text.pos.analyseAll(authors_analysis[author][\"raw_analysis\"])\n authors_measures[author][ \"wordnet\" ]=P.text.wordnet.analyseAll(authors_analysis[author][\"pos_analysis\"])\n authors_measures[author][\"tfIdf\"]=P.text.tfIdf.analyseAll(texts) # tfIdf de cada texto e do autor, numeric: mean e std das distancias\n # análise de cada setor e da estrutura toda\n# sectors_texts=P.text.aux.textFromSectors(authors_text,sectorialized_agents)\n sectors_measures={}\n for sector in sectorialized_agents:\n sectors_measures[sector][\"raw_strings\"]=P.text.raw.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n sectors_measures[sector][\"pos\"]= P.text.pos.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n sectors_measures[sector][\"wordnet\"]= P.text.wordnet.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n # tfIdf de cada texto e de cada autor, 
numeric: mean e std das distancias por texto e por autor, e media e etd dos autores\n sectors_measures[sector][\"tfIdf\"]= P.text.tfIdf.sectorsAnalyseAll(authors_analysis,sectorialized_agents[sector])\n\n# texts=[sectors_texts[i] for i in (\"peripherals\",\"intermediaries\",\"hubs\")]\n# sectors_analysis[\"raw_strings\"]=P.text.raw.analyseAll(texts)\n# sectors_analysis[\"pos\"]= P.text.pos.analyseAll(sectors_analysis[\"raw_analysis\"])\n# sectors_analysis[ \"wordnet\" ]=P.text.wordnet.analyseAll(sectors_analysis[\"pos_analysis\"])\n# sectors_analysis[\"tfIdf\"]=P.text.tfIdf.tfIdf(texts)\n\n overall_measures[\"raw_strings\"]=P.text.raw.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n overall_measures[\"pos\"]=P.text.raw.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n overall_measures[\"wordnet\"]=P.text.raw.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n # tfIdf measurespor texto, autor e setor, numeric: media e desvio das distancias por cada grupo, media e desvio dos setores e dos autores\n overall_measures[\"tfIdf\"]=P.text.tfIdf.systemAnalysis(sectors_analysis) # medias de toda a rede por mensagem, por autor e por setor\n\n del authors_texts,sectorialized_agents,author, sector\n return locals()", "def main_email(name, total, answered, not_answered, declines, remaining):\n\n start = smtplib.SMTP(host=HOST, port=PORT)\n start.starttls()\n start.login(ADDRESS, PASSWORD)\n\n date = datetime.datetime.now()\n date_now = date.strftime(\"%m-%d-%Y\")\n\n print_list, email_dict = simple_contacts('contacts.txt')\n\n emails = get_emails(print_list, email_dict)\n\n message_template = read_template()\n\n for mail in emails:\n pretty_print(f\"Sending email to {mail}\", \"!\")\n msg = MIMEMultipart()\n\n message = message_template.substitute(PERSON_NAME=name, DATE=date_now, TOTAL_CALLED=total, ANSWERED=answered, NOT_ANSWERED=not_answered, DECLINES=declines, REMAINING=remaining)\n\n msg['From'] = ADDRESS\n msg['To'] = mail\n msg['Subject'] = f\"{name} - Calling Campaign Summary - {date_now}\"\n\n msg.attach(MIMEText(message, 'plain'))\n start.send_message(msg)\n pretty_print(f\"Mail sent to {mail}\", \"!\")\n\n del msg\n\n start.quit()", "def contactable_fields(agency, office_dict):\n agency.phone = clean_phone(office_dict.get('phone'))\n # a.toll_free_phone - not an explicit field in our data set\n agency.emails = office_dict.get('emails', [])\n agency.fax = clean_phone(office_dict.get('fax'))\n agency.office_url = office_dict.get('website')\n\n agency.request_form_url = office_dict.get('request_form')\n\n service_center = office_dict.get('service_center', '')\n match = TTY_RE.search(service_center)\n if match:\n agency.TTY_phone = match.group(0)\n # Hack until we fix the underlying data\n if ', Phone:' in service_center:\n name = service_center[:service_center.index(', Phone:')]\n else:\n name = service_center\n agency.person_name = name or None\n\n public_liaison = office_dict.get('public_liaison', '')\n # Hack until we fix the underlying data\n if ', Phone:' in public_liaison:\n name = public_liaison[:public_liaison.index(', Phone:')]\n phone = public_liaison[public_liaison.index('Phone:'):]\n phone = phone[len('Phone:'):].strip()\n # Remove TTY, if present\n match = TTY_RE.search(phone)\n if match:\n phone = phone[:match.start()].strip()\n agency.public_liaison_phone = clean_phone(phone)\n else:\n name = public_liaison\n agency.public_liaison_name = name or 
None\n\n address = office_dict.get('address', [])\n if address:\n match = ADDY_RE.match(address[-1])\n if match:\n agency.zip_code = match.group('zip')\n agency.state = match.group('state')\n agency.city = match.group('city')\n\n if len(address) > 1:\n agency.street = address[-2]\n if len(address) > 2:\n agency.address_lines = address[0:-2]\n reading_rooms = office_dict.get('reading_rooms', [])\n if reading_rooms:\n add_reading_rooms(agency, reading_rooms)", "def process_image( img_path_filename, output_dir_name, output_path_filename):\n\t########################### Google OCR #############################\n\tclient = vision.ImageAnnotatorClient()\n\n\tlines_boxes_img = []\n\tlines_texts_img = []\n\tlines_probs_img = []\n\n\t# Path + Base name for the block files\n\tfilename = img_path_filename.split('/')[-1]\n\tbasename = filename.split('.')[0]\n\n\tcontent = None\n\twith io.open( img_path_filename, 'rb' ) as image_file:\n\t\tcontent = image_file.read()\n\n\ttry:\n\t\t# Process image and recognize its parts and text\n\t\timage = types.Image( content=content )\n\t\tresponse = client.document_text_detection(image=image)\n\t\tdocument = response.full_text_annotation\n\n\t\tfulltext_path_filename = output_dir_name + \"/\" + basename + \".txt\"\t\n\t\t# Save all the extracted text in a text file\n\t\twith open( fulltext_path_filename,'w') as f:\n\t\t\tf.write( response.full_text_annotation.text )\n\n\t\t# Collect the lines, their probabilities, and their bounding boxes\n\t\tfor page in document.pages:\n\t\t\tfor block in page.blocks:\n\t\t\t\tfor paragraph in block.paragraphs:\n\t\t\t\t\t# Divide the paragraph in lines and get its lines, bounding boxes, and symbols' probabilities\n\t\t\t\t\tlines_boxes_par, lines_texts_par, lines_probs_par = process_paragraph( paragraph )\n\t\t\t\t\t# Extend the line lists\n\t\t\t\t\tlines_boxes_img.extend( lines_boxes_par )\n\t\t\t\t\tlines_texts_img.extend( lines_texts_par )\n\t\t\t\t\tlines_probs_img.extend( lines_probs_par )\n\texcept Exception as e:\n\t\tprint(\"Error: \" + img_path_filename + \", \" + str(e))\n\t\treturn\n\n\t# Crop and save the image for each paragraph, its text files, and its probabilities files. 
It also returns the bbox statistics.\n\ttext_local, text_global = \"\", \"\"\n\ttext_local, text_global = crop_save( img_path_filename, lines_boxes_img, lines_texts_img, lines_probs_img, filename, basename, output_dir_name )\n\n\t# Save the bounding box information in the local and in the global file\n\tif text_global != \"\":\n\t\t# Save the data of the lines in the local text file\n\t\twith open(output_dir_name + \"/\" + basename + \"_lines.csv\", \"w+\") as f:\n\t\t\tf.write( text_local )\n\n\t\t# Save the data of the lines in the global text file\n\t\twith open(output_path_filename, \"a+\") as f:\n\t\t\tf.write( text_global )", "def buscarEnLineas(ipSrc, maskSrc, ipDst, maskDst, fileLines):\n\tipsAnteriores = {}\n\tipsActuales = {}\n\tfor actual in fileLines:\n\t\tmatches = re.search(\n\t\t\t# nro de hop ((nombre) IP del server RTT) o un * \n\t\t\t'\\s*([0-9]{,2})\\s+(?:(?:.+?\\s*\\(([0-9.]+)\\))\\s+((?:\\s*[0-9.]+\\s+ms)+)\\s+|\\*)$'\n\t\t\t, actual\n\t\t) #pueden ir a http://www.regexper.com/ y ver que significa\n\t\tif matches.group(1): #es un nuevo hop\n\t\t\thop = matches.group(1)\n\t\t\tipsAnteriores = ipsActuales\n\t\t\tipsActuales = {}\n\t\tip = matches.group(2)\n\t\tif ip:\t#me respondieron\n\t\t\tif masked(ip, maskDst) == masked(ipDst, maskDst) and ipsAnteriores.__contains__(masked(ipSrc, maskSrc)):\n\t\t\t\tparsearTiempos = lambda string: map(float, re.findall('(?:\\s*([0-9.]+)+\\s+ms)', string))\n\t\t\t\tpromedio = lambda l: sum(l)/len(l)\n\t\t\t\ttiempoAnterior = promedio(parsearTiempos(ipsAnteriores[masked(ipSrc, maskSrc)]))\n\t\t\t\ttiempoActual = promedio(parsearTiempos(matches.group(3)))\n\t\t\t\trtt = tiempoActual - tiempoAnterior\n\t\t\t\treturn hop, rtt\n\t\t\telse:\n\t\t\t\tif ipsActuales.__contains__(masked(ip, maskDst)):\n\t\t\t\t\tipsActuales[masked(ip, maskSrc)] += ' ' + matches.group(3)\t#te quiero python\n\t\t\t\telse:\n\t\t\t\t\tipsActuales[masked(ip, maskSrc)] = matches.group(3)\n\treturn -1, 0", "def __getAddresses(parsed: BeautifulSoup) -> list:\n\n # Addresses container\n address_divs = parsed.find_all('div', class_='mailer')\n\n # Building RegEx for phone number\n # The following RegEx extracts phone numbers in the following formats:\n # 1. (###) ###-####\n # 2. ###-###-####\n # 3. 
##########\n phone_number_regex = re.compile(\n r'(\\(\\d{3}\\) \\d{3}-\\d{4}|\\d{3}-\\d{3}-\\d{4}|\\d{10})')\n\n # List for final addresses\n addresses = list()\n\n for address in address_divs:\n # Create dict for address\n address_parsed = dict()\n # Split text by newline\n address_items = address.text.split('\\n')\n # Removing leading and trailing spaces\n address_items = [i.strip() for i in address_items]\n\n # Variable to store street address\n street_address = ''\n\n # Iterate through each line\n for idx, address_item in enumerate(address_items):\n # First line is address type\n if idx == 0:\n address_parsed['type'] = address_item\n continue\n\n # Check if line has phone number\n phone_matches = phone_number_regex.findall(address_item)\n if len(phone_matches) == 1:\n # Stripping non-digit characters from phone number\n phone_number = re.sub('[^0-9]', '', phone_matches[0])\n address_parsed['phone'] = phone_number\n continue\n \n # If no number, add to address line\n street_address += address_item.strip() + ' '\n \n # Adding street address to parsed address\n address_parsed['street_address'] = street_address.strip()\n\n # Adding parsed address to addresses master list\n addresses += [address_parsed]\n\n return addresses", "def split_full_text(self, full_text, headers_list):\n\n sectioned_text = {}\n indices = {}\n no_abstr = False\n\n for i, hd in enumerate(headers_list):\n #need to replace special regex characters before matching substrings\n if '(' in hd:\n hd = hd.replace('(', '\\(')\n\n if ')' in hd:\n hd = hd.replace(')', '\\)')\n\n if '[' in hd:\n hd = hd.replace('[', '\\[')\n\n if ']' in hd:\n hd = hd.replace(']', '\\]')\n\n if '{' in hd:\n hd = hd.replace('{', '\\{')\n\n if '}' in hd:\n hd = hd.replace('}', '\\}')\n\n if '+' in hd:\n hd = hd.replace('+', '\\+')\n\n if '*' in hd:\n hd = hd.replace('*', '\\*')\n\n if ':' in hd:\n hd = hd.replace(':', '\\:')\n\n if i == 0: # meta-data has no substring-matching to do\n\n inds = [m.start() for m in re.finditer(hd, full_text)]\n #Abstract can appear in text, but isn't listed w/ headers\n #Only use first instance\n if len(inds) > 0:\n indices[hd] = inds[0]\n\n else: #if there is no abstract, use figures to remove meta-data\n fig_text = [m.start() for m in re.finditer('Figure', full_text)]\n indices[hd] = fig_text[0]\n no_abstr = True\n\n else:\n inds = [m.start() for m in re.finditer(hd, full_text)]\n #assume final instance of substring match corresponds\n #to the correct header text instance\n indices[hd] = inds[-1]\n\n\n for i, hd in enumerate(headers_list):\n\n if i == 0:\n if no_abstr == True:\n\n #get meta-data, which has no keyword matching\n sectioned_text['Section Headers'] = headers_list\n end_ind = indices[' Abstract ']\n sectioned_text['Meta-data'] = full_text[:end_ind]\n\n #indicate there is no abstract\n start_id = indices[' Abstract ']\n end_id = indices[list(indices.keys())[1]]\n sectioned_text[' Abstract '] = ''\n\n\n if no_abstr == False:\n #get meta-data, which has no keyword matching\n sectioned_text['Section Headers'] = headers_list\n end_ind = indices[' Abstract ']\n sectioned_text['Meta-data'] = full_text[:end_ind]\n\n #get abstract\n start_id = indices[' Abstract ']\n end_id = indices[list(indices.keys())[1]]\n sectioned_text[hd] = full_text[start_id : end_id]\n\n if i > 0 and i < len(headers_list)-1: #all setions but final section\n if i == 1:\n if no_abstr == True:\n start_id = indices[' Abstract ']\n end_id = indices[list(indices.keys())[i+1]]\n sectioned_text[hd] = full_text[start_id:end_id]\n\n else:\n 
start_id = indices[list(indices.keys())[i]]\n end_id = indices[list(indices.keys())[i+1]]\n sectioned_text[hd] = full_text[start_id:end_id]\n\n else:\n start_id = indices[list(indices.keys())[i]]\n end_id = indices[list(indices.keys())[i+1]]\n sectioned_text[hd] = full_text[start_id:end_id]\n\n if i == len(headers_list) - 1: #final header\n start_id = indices[list(indices.keys())[i]]\n sectioned_text[hd] = full_text[start_id:]\n\n return sectioned_text", "def _split(self):\n text = self.md\n self.parts = parts = []\n self.headers = headers = []\n lines = []\n\n # Split in parts\n for line in text.splitlines():\n if line.startswith((\"# \", \"## \", \"### \", \"#### \", \"##### \")):\n # Finish pending lines\n parts.append(\"\\n\".join(lines))\n lines = []\n # Process header\n level = len(line.split(\" \")[0])\n title = line.split(\" \", 1)[1]\n title_short = title.split(\"(\")[0].split(\"<\")[0].strip().replace(\"`\", \"\")\n headers.append((level, title_short))\n parts.append((level, title_short, title))\n else:\n lines.append(line)\n parts.append(\"\\n\".join(lines))\n\n # Now convert all text to html\n for i in range(len(parts)):\n if not isinstance(parts[i], tuple):\n parts[i] = markdown.markdown(parts[i], extensions=[]) + \"\\n\\n\"", "def parse_line(self, atline: List, list_of_lines: List, part: PART, afix: AFIX, resi: RESI) -> None:\n uvals = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]\n self.name = atline[0][:4] # Atom names are limited to 4 characters\n for n, u in enumerate(atline[6:12]):\n uvals[n] = float(u)\n self.uvals_orig = uvals[:]\n self.set_uvals(uvals)\n self._line_numbers = list_of_lines\n self.part = part\n self.afix = afix\n self.resi = resi\n self._get_part_and_occupation(atline)\n self.x, self.y, self.z = self._get_atom_coordinates(atline)\n self.xc, self.yc, self.zc = self._cell.o * Array(self.frac_coords)\n if abs(self.uvals[1]) > 0.0 and self.uvals[2] == 0.0 and self.shx.hklf: # qpeaks are always behind hklf\n self.peak_height = uvals[1]\n self.qpeak = True\n if self.shx.end: # After 'END' can only be Q-peaks!\n self.qpeak = True\n self.sfac_num = int(atline[1])\n self.shx.fvars.set_fvar_usage(self.fvar)\n self.Ucif = self.set_ucif(uvals)\n # TODO: I am still unsure if this these are correct values:\n # self.Ustar = self.Ucif * self._cell.N * self._cell.N.T\n # self.Ucart = self.Ustar * self._cell.o * self._cell.o.T\n # self.Ueq = self.set_ueq(uvals)\n # self.Uiso = self.Ueq\n # transformed_u = self.transform_u_by_symmetry(2)\n # print(self.name, [round(x, 6) for x in transformed_u], self.frac_coords)", "def make_fields(self, ftext):\n fields = [{'type': 'mrkdwn', 'text': x} for x in ftext]\n return utils.chunks(fields, 10)", "def message(self, text):\n lines = str(text).split('\\n') # Split at newline(s)\n for i, line in enumerate(lines): # For each substring...\n if i > 0: # If newline(s),\n self.write_lcd(self.LCD_DATA_E1, 0xC0) # set DDRAM address to 2nd line\n self.write_lcd(self.LCD_DATA_E1, line, True) # Issue substring", "def _parse_general_info(self, line):\n if self._regex_helper.search_compiled(W._re_general_info, line):\n self.current_ret['GENERAL_INFO'].update({\n 'time': datetime.datetime.strptime(self._regex_helper.group(\"TIME\"), '%H:%M:%S').time(),\n 'uptime': self._regex_helper.group(\"UPTIME\"),\n 'user_number': self._regex_helper.group(\"USER_NUMBER\"),\n 'load_average': self._regex_helper.group(\"L_AVERAGE\")\n })\n raise ParsingDone" ]
[ "0.60554934", "0.58570105", "0.5800822", "0.57903785", "0.5709253", "0.5683452", "0.56657594", "0.5564154", "0.54445773", "0.5323824", "0.52994883", "0.5294524", "0.5293484", "0.5284312", "0.52493984", "0.5248615", "0.5206312", "0.51865774", "0.5184688", "0.5175195", "0.5164879", "0.5164168", "0.5132815", "0.51286614", "0.51051915", "0.51050574", "0.50853634", "0.5054095", "0.50410324", "0.50316143", "0.502693", "0.4984561", "0.49697244", "0.49387416", "0.49312773", "0.4925569", "0.49248585", "0.49223462", "0.49218062", "0.49107608", "0.49057007", "0.49054393", "0.4890138", "0.48822138", "0.48802722", "0.48773447", "0.48675996", "0.48675895", "0.48674113", "0.48623464", "0.4860875", "0.4860203", "0.4852083", "0.48259747", "0.4822113", "0.48136708", "0.48077786", "0.48002347", "0.47900376", "0.47871393", "0.4785602", "0.47838923", "0.47709882", "0.47700366", "0.47680372", "0.47661567", "0.47606218", "0.47598562", "0.47546524", "0.47520554", "0.4749382", "0.47487336", "0.47449565", "0.47422695", "0.4739449", "0.47393712", "0.47378835", "0.47365853", "0.47358936", "0.47323355", "0.47322857", "0.47187245", "0.4707321", "0.47034168", "0.46880442", "0.4680616", "0.46783555", "0.46702594", "0.46690023", "0.46675763", "0.46663308", "0.4664351", "0.46608123", "0.46543896", "0.46535444", "0.4651949", "0.46500397", "0.46500337", "0.4641225", "0.4637289" ]
0.7433099
0
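
Between rows, a minimal sketch of how rows shaped like the ones above might be consumed for training. The field names (query, document, negatives, negative_scores, document_score, document_rank) are taken from the columns shown in this dump; the JSON Lines storage format and the file name "rows.jsonl" are assumptions for illustration only, not part of the dataset itself.

# Minimal sketch: pair each query with its positive document and its
# mined hard negatives, hardest (highest-scoring) negatives first.
# Assumes the rows were exported as JSON Lines; "rows.jsonl" is a placeholder.
import json

def iter_triplets(path="rows.jsonl", max_negatives=4):
    """Yield (query, positive, negative) triplets from one dataset row per line."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            # negative_scores are stored as strings in this dump, so cast to float
            # before sorting the negatives by similarity score, descending.
            scored = sorted(
                zip(row["negatives"], map(float, row["negative_scores"])),
                key=lambda pair: pair[1],
                reverse=True,
            )
            for negative, _score in scored[:max_negatives]:
                yield row["query"], row["document"], negative

if __name__ == "__main__":
    # Print one example triplet, truncated for readability.
    for query, positive, negative in iter_triplets():
        print(query[:60], "->", positive[:60], "|", negative[:60])
        break

This mirrors the triplet objective declared in each row's metadata field ([["query", "document", "negatives"]]): the query is anchored to its positive document and contrasted against the mined negatives.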
Adds the parsed items to the MailFields object.
def _add_to_fields(self, tag, data): # Addressee data if 'Recipient' == tag: names = data.split() for name in names: self.__fields.addressee_line['all_names'].append(name) # Probable box data # Strip out anything that's not a number since we might get some other # data inside here also. If the box # can be a subnumber (BOX 102-A) then # we'll end up putting everything in the # only. if 'USPSBoxGroupID' == tag or 'USPSBoxGroupType' == tag or \ 'USPSBoxID' == tag or 'USPSBoxType' == tag or \ 'OccupancyType' == tag or 'OccupancyIdentifier' == tag or \ 'SubaddressType' == tag or 'SubaddressIdentifier' == tag: box = re.search('\d+', data) if box is not None: self.__fields.probable_box.append(box.group(0)) # Street data # Discarding street number prefix and suffix for now if 'AddressNumber' == tag: self.__fields.street_line['number'].append(data) if 'StreetName' == tag: self.__fields.street_line['street_name'].append(data) # City data if 'PlaceName' == tag: self.__fields.city_line['city'].append(data) if 'StateName' == tag: self.__fields.city_line['state'].append(data) if 'ZipCode' == tag: self.__fields.city_line['zip_code'].append(data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _add_fields(self, fields):\n for field in fields:\n self.add(field)", "def add_fields(self, fields):\n for label, data in fields.items():\n self[label] = data", "def parse(self):\n\t\tfor part in self.mail.walk():\n\t\t\tself.process_part(part)", "def extract_form_fields(item):\n # Strip off any trailing \\r\\n\n formitems = item.value.rstrip('\\r\\n')\n # Split the items by newline, this gives us a list of either 1, 3, 4\n # or 5 items long\n itemlist = formitems.split(\"\\n\")\n # Setup some regular expressions to parse the items\n re_list = [\n re.compile(\n '^[0-1][0-9]:[0-5][0-9]:[0-5][0-9] DEBUG - $'),\n re.compile('^(payload)({\".*)$'),\n re.compile('^([a-z]+): (.*)$'),\n ]\n itemdict = {}\n # Go through the 1, 3, 4 or 5 items list\n for anitem in itemlist:\n # Compare each item to the regular expressions\n for a_re in re_list:\n match = re.search(a_re, anitem)\n if match:\n if len(match.groups()) == 0:\n # We have a match but no groups, must be\n # the preamble.\n itemdict['preamble'] = match.group(0)\n elif len(match.groups()) == 2:\n # All other re's should have 2 matches\n itemdict[match.group(1)] = match.group(2)\n # We already have a match, skip other regular expressions.\n continue\n return itemdict", "def populate_fields(self):\n self.group_name = self._extract_group_name()\n self.services = self._extract_services()\n\n self._logger.debug(\"Parsed value: {}\".format([\n self.group_name,\n self.services]))", "def parse_items(self):", "def add_fields(self, *fields: Field):\n self.fields.extend(fields)", "def populate_data_from_message(self, msg):\n for field in self:\n try:\n setattr(field, 'data', getattr(msg, field.name))\n except:\n continue", "def _append_customfield_fields(self):\n for customfield in self._get_custom_fields(self._get_content_type()):\n if customfield.ui_visibility == CustomFieldVisibilityChoices.VISIBILITY_HIDDEN:\n continue\n\n field_name = f'cf_{customfield.name}'\n self.fields[field_name] = self._get_form_field(customfield)\n\n # Annotate the field in the list of CustomField form fields\n self.custom_fields[field_name] = customfield\n if customfield.group_name not in self.custom_field_groups:\n self.custom_field_groups[customfield.group_name] = []\n self.custom_field_groups[customfield.group_name].append(field_name)", "def __init__(self, *args, **kwargs):\n super(ListFieldType, self).__init__(*args, **kwargs)\n\n self.item_info = self.field_info.get('items')", "def parse_fields(self, request, fields=None, skip=set(), additional=[]):\n fields = fields or self.fields\n fields = [f for f in fields if f.name not in skip]\n fields.extend(additional)\n result = dict()\n for field in fields:\n try:\n result[field.name] = field.get_value(request, self)\n except ValueError, msg:\n raise HTTP_BAD_REQUEST(str(msg))\n return result", "def parse_text_lines(self, text_lines):\n\n self.__fields = mail_fields.MailFields()\n\n alphanum_threshold = 0.5\n\n # Only evaluate lines that are predominantly alphanumeric\n for line in text_lines:\n if _alnum_percent(line) > alphanum_threshold: \n try:\n parsed = usaddress.tag(line)[0]\n except usaddress.RepeatedLabelError as e:\n # If usaddress gets confused, just throw away the answer as if\n # we got nothing for now.\n # TODO(searow): fix this to handle multiple tags and labels.\n parsed = {}\n for tag in parsed:\n self._add_to_fields(tag, parsed[tag])\n return self.__fields", "def _init_fields(self):\n if self._fields is None:\n M.mset('U', \"^\") # DBS Calls Require this\n f = self._fields = {}\n attrs = 
self.fieldnames = {}\n fieldid = \"0\"\n while 1:\n # Subscript 0 is field description, .1 is the title, 3 is help\n fieldid, info, title, fieldhelp = M.ddwalk(self._fileid, fieldid)\n #fieldid, info, title, fieldhelp = M.mexec(\n # \"\"\"set s0=$order(^DD(s2,s0)) Q:s0'=+s0 s s1=$G(^DD(s2,s0,0)),s3=$G(^DD(s2,s0,.1)),s4=$G(^DD(s2,s0,3))\"\"\",\n # M.INOUT(str(fieldid)), M.INOUT(\"\"), str(self._fileid), M.INOUT(\"\"), M.INOUT(\"\"))\n if fieldid == \"\" or fieldid[0] not in \"0123456789.\":\n break\n\n info = info.split(\"^\", 4) \n label = self._clean_label(info[0])\n try:\n ftype = info[1]\n except:\n ftype = None\n if ftype:\n finst = None\n for klass in FIELD_TYPES:\n if klass.isa(ftype):\n finst = f[fieldid] = klass(fieldid, label, info)\n finst.fileid = self.fileid\n finst.ownerdd = self\n attrs[label] = fieldid\n break\n if finst is None:\n print finst, \"FIELD [%s], spec [%s] was not identified\" % (label, ftype)\n continue\n finst.title = title\n finst.fieldhelp = fieldhelp\n else:\n assert finst, \"FIELD [%s] %s has no fieldspec\" % (label, info)\n\n return self._fields", "def set_up_fields(self, fields):\n self.fields = {\n 'name': self.name,\n 'email': self.email\n }\n for key in fields.keys():\n # special keys first, not to be used in the template\n if key.upper() == 'CC':\n self.is_cc = fields[key]\n elif key.upper() == 'BCC':\n self.is_bcc = fields[key]\n else:\n self.fields[key] = fields[key]", "def parse_fields(self, response, fields_dict, net_start=None,\n net_end=None, dt_format=None, field_list=None):\n\n ret = {}\n\n if not field_list:\n\n field_list = ['name', 'handle', 'description', 'country', 'state',\n 'city', 'address', 'postal_code', 'emails',\n 'created', 'updated']\n\n generate = ((field, pattern) for (field, pattern) in\n fields_dict.items() if field in field_list)\n\n for field, pattern in generate:\n\n pattern = re.compile(\n str(pattern),\n re.DOTALL\n )\n\n if net_start is not None:\n\n match = pattern.finditer(response, net_end, net_start)\n\n elif net_end is not None:\n\n match = pattern.finditer(response, net_end)\n\n else:\n\n match = pattern.finditer(response)\n\n values = []\n sub_section_end = None\n for m in match:\n\n if sub_section_end:\n\n if field not in (\n 'emails'\n ) and (sub_section_end != (m.start() - 1)):\n\n break\n\n try:\n\n values.append(m.group('val').strip())\n\n except IndexError:\n\n pass\n\n sub_section_end = m.end()\n\n if len(values) > 0:\n\n value = None\n try:\n\n if field == 'country':\n\n value = values[0].upper()\n\n elif field in ['created', 'updated'] and dt_format:\n\n value = datetime.strptime(\n values[0],\n str(dt_format)).isoformat('T')\n\n elif field in ['emails']:\n\n value = list(unique_everseen(values))\n\n else:\n\n values = unique_everseen(values)\n value = '\\n'.join(values).strip()\n\n except ValueError as e:\n\n log.debug('Whois field parsing failed for {0}: {1}'.format(\n field, e))\n pass\n\n ret[field] = value\n\n return ret", "def parse(self):\n result = []\n for field in self.get_fields():\n result.append(field.get_field())\n return result", "def all_fields(item):\n return scom.all_fields(item)", "def update_fields(self):\n if hasattr(self.day, \"body_composition\"):\n for f in self.get_fields():\n name = f.get_field().name\n value = getattr(self.day.body_composition, name, None)\n if value is not None:\n f.set_field(value)\n else:\n f.set_field(\"\")", "def appendedEntries(self):\n self.contact_list.append({\"name\": self.first_name.title() + \" \" + self.last_name.title(), \"phone number\": 
self.phone_number, \"phone number type\": self.phone_number_type})", "def __createFields(self):\n fields = self.updateFields\n for field in fields:\n self.__createField(field)", "def _parse_records(self, customization=None):\n def _add_parsed_record(record, records):\n \"\"\"\n Atomic function to parse a record\n and append the result in records\n \"\"\"\n if record != \"\":\n logger.debug('The record is not empty. Let\\'s parse it.')\n parsed = self._parse_record(record, customization=customization)\n if parsed:\n logger.debug('Store the result of the parsed record')\n records.append(parsed)\n else:\n logger.debug('Nothing returned from the parsed record!')\n else:\n logger.debug('The record is empty')\n\n records = []\n record = \"\"\n # read each line, bundle them up until they form an object, then send for parsing\n for linenumber, line in enumerate(self.bibtex_file_obj):\n logger.debug('Inspect line %s', linenumber)\n if line.strip().startswith('@'):\n # Remove leading whitespaces\n line = line.lstrip()\n logger.debug('Line starts with @')\n # Parse previous record\n _add_parsed_record(record, records)\n # Start new record\n logger.debug('The record is set to empty')\n record = \"\"\n # Keep adding lines to the record\n record += line\n\n # catch any remaining record and send it for parsing\n _add_parsed_record(record, records)\n logger.debug('Set the list of entries')\n self.bib_database.entries = records", "def format_data(self, _item_fields, special=None):\n\n if special:\n _item_fields[\"special\"] = special\n\n return _item_fields", "def meta_fields(item):\n return scom.meta_fields(item)", "def process_entry(self):\n\n for line_item in self.entry:\n pairs = line_item.split(' ')\n for pair in pairs:\n if ':' in pair:\n key, value = pair.split(':')\n if value.isdigit():\n self.fields[key] = int(value)\n else:\n self.fields[key] = value", "def dict_to_fm_field_list(\n self, data: Dict[str, Any], language_code: str, line: int = 0\n ) -> nodes.field_list:\n field_list = nodes.field_list()\n\n bibliofields = get_language(language_code).bibliographic_fields\n state_machine = MockStateMachine(self, line)\n state = MockState(self, state_machine, line)\n\n for key, value in data.items():\n if not isinstance(value, (str, int, float, date, datetime)):\n value = json.dumps(value)\n value = str(value)\n if key in bibliofields:\n para_nodes, _ = state.inline_text(value, line)\n body_children = [nodes.paragraph(\"\", \"\", *para_nodes)]\n else:\n body_children = [nodes.Text(value, value)]\n\n field_node = nodes.field()\n field_node.source = value\n field_node += nodes.field_name(key, \"\", nodes.Text(key, key))\n field_node += nodes.field_body(value, *body_children)\n field_list += field_node\n\n return field_list", "def fields(self, fields: List[SingleField]):\n\n self._fields = fields", "def make_fields(self, ftext):\n fields = [{'type': 'mrkdwn', 'text': x} for x in ftext]\n return utils.chunks(fields, 10)", "def __extract_fields(self):\n for name, stuff in self.data.items():\n if stuff == (): # Empty tuple == 1 bit, value of 0\n self.fields.append(Field(name=name, value=0, size=1))\n elif isinstance(stuff, int): # int == specified value, value of 0\n self.fields.append(Field(name=name, value=0, size=stuff))\n elif isinstance(stuff, str): # str == specified value, value of 0\n pattern = re.compile(\"[0-9]+[bB]\")\n if pattern.match(stuff):\n if \"b\" in stuff: # bits specified\n size = int(stuff[:stuff.lower().index(\"b\")])\n self.fields.append(Field(name=name, value=0, size=size))\n elif 
\"B\" in stuff: # Bytes specified\n size = int(stuff[:stuff.lower().index(\"b\")]) * 8\n self.fields.append(Field(name=name, value=0, size=size))\n else: # No other string option, so must have been one of the \"vary\" constants from above.\n self.fields.append(Field(name=name, value=stuff, size=\"vary\"))\n elif isinstance(stuff, tuple) or isinstance(stuff, list): # specified value and size.\n if isinstance(stuff[0], str):\n if \"b\" in stuff[0]: # Bits\n size = int(stuff[0][:stuff[0].lower().index(\"b\")])\n # if not self.__check_bit_size(stuff[1], size):\n # raise Exception(\"error. \" + str(stuff[1]) + \" cannot be fit in \" + str(size) + \" bits.\")\n self.fields.append(Field(name=name, value=stuff[1], size=size))\n elif \"B\" in stuff[0]: # Bytes\n size = int(stuff[0][:stuff[0].lower().index(\"b\")]) * 8\n # if not self.__check_bit_size(stuff[1], size):\n # raise Exception(\"error. \" + str(stuff[1]) + \" cannot be fit in \" + str(size) + \" bits.\")\n self.fields.append(Field(name=name, value=stuff[1], size=size))\n elif stuff[0].lower() == NULL_TERMINATE:\n self.fields.append(Field(name=name, value=stuff[1], size=NULL_TERMINATE))\n elif stuff[0].lower() == PREFIX_LENGTH:\n self.fields.append(Field(name=name, value=stuff[1], size=PREFIX_LENGTH))\n elif stuff[0].lower() == PREFIX_LEN_NULL_TERM:\n self.fields.append(Field(name=name, value=stuff[1], size=PREFIX_LEN_NULL_TERM))\n elif stuff[0].lower() == IPv4:\n self.fields.append(Field(name=name, value=stuff[1], size=IPv4))\n elif isinstance(stuff[0], int):\n # if not self.__check_bit_size(stuff[1], stuff[0]):\n # raise Exception(\"error. \" + str(stuff[1]) + \" cannot be fit in \" + str(stuff[0]) + \" bits.\")\n self.fields.append(Field(name=name, value=stuff[1], size=stuff[0]))", "def add_new_item_field(*fields, **keywords):\n\n for field in fields:\n print \"Creating {0} custom field...\".format(field)\n doc = frappe.get_doc({\n \"doctype\": \"Custom Field\",\n \"dt\": \"Item\",\n \"fieldtype\": \"Data\",\n \"label\": field,\n \"insert_after\": keywords['insert_after']\n })\n doc.insert()\n\n print \"-----\"\n print \"Finished creating custom fields...\"\n print \"-----\"", "def payload_parse(self, mail):\n\t\tif mail.is_multipart():\n\t\t\tfor payload in mail.get_payload():\n\t\t\t\tif payload.get_content_maintype() == \"multipart\":\n\t\t\t\t\tself.payload_parse(payload)\n\t\t\t\telse:\n\t\t\t\t\tself.payload_handle(payload, mail)\n\t\t\t# Post deletion of payloads:\n\t\t\tself.payload_delete(mail)", "def set_fields(self):\n self.__fields = ['id',\n 'Request_Received','First_Name','Last_Name','Middle_Name',\n 'DOB','Gender','Nationality','City','State','Pincode','Qualification',\n 'Salary','PAN_Number']\n self.__response_field = ['id','Request_Id','Response_Generated']", "def parseFieldAll(fn):\n content = getFileContent(fn)\n if content is not None:\n return parseInternalFieldContent(content), parseBoundaryContent(content)\n else:\n return None", "def add_items(self, items):\n for item in items:\n self.add(item)", "def parse_details(self, response, item=None):\n \n assert item is not None, \"Provide an item\"\n \n if response:\n # Use individual WARN notice url\n item['url'] = response.url\n\n fields = item['fields']\n \n dt = get_text_of_matching_elements(response, '//dt')\n dd = get_text_of_matching_elements(response, '//dd')\n\n data = dict(zip(dt, dd))\n \n # Update fields with additional data\n fields.update(data)\n item['fields'] = fields\n\n # Generate normalized fields\n norm_fields = 
get_normalized_fields(self.fields_dict, pd.Series(fields)).to_dict()\n item['normalized_fields'] = norm_fields \n\n yield item", "def updateFromFields(self, fields, data):\n self._fields = fields\n data = [d if d is not None else '' for d in data]\n for field,val in zip(fields, data):\n setattr(self, field, val)", "def add(self, emails):\r\n request = http.Request('POST', self.get_url(), emails)\r\n\r\n return request, parsers.parse_json", "def _fields(self, doclet):\n FIELD_TYPES = OrderedDict([('params', _params_formatter),\n ('properties', _params_formatter),\n ('exceptions', _exceptions_formatter),\n ('returns', _returns_formatter)])\n for field_name, callback in iteritems(FIELD_TYPES):\n for field in doclet.get(field_name, []):\n description = field.get('description', '')\n unwrapped = sub(r'[ \\t]*[\\r\\n]+[ \\t]*', ' ', description)\n yield callback(field, unwrapped)", "def _parse_line(self, line):\n fields = line.split('|', 4) # stop splitting after fourth | found\n line_info = {'raw_message': line}\n if len(fields) == 5:\n line_info.update(dict(zip(self._fieldnames, fields)))\n return line_info", "def _modify(self, fields):\n return fields", "def processItem(self):\r\n self.extract()\r\n self.mergeLanguageClaims()\r\n self.validateClaims()\r\n self.mergeWithWikidata()\r\n self.writeToWikidata()\r\n self.log()", "def _populate(self, fields):\n schema = self.schema\n for k, v in fields.items():\n fields[k] = schema.fields[k].iget(self, v)\n\n self.modify(fields)\n self.reset_modified()", "def setup_known_fields(self):\n\n kfields = dict(self.known_fields)\n freg = re.compile(r\"(^.+)_\\d+$\")\n for field in self:\n if self[field].get(\"units\") is not None:\n continue\n\n if field in kfields:\n self[field][\"units\"] = kfields[field]\n continue\n\n fs = freg.search(field)\n if fs and fs.groups()[0] in kfields:\n self[field][\"units\"] = kfields[fs.groups()[0]]", "def add_mapping_fields(self, mapping, analyzer_lang, analyzer_case_insensitive_sort):\r\n # Specific fields email\r\n analyzer_email = analysis.analyzer('email', tokenizer=analysis.tokenizer('uax_url_email'),\r\n filter=['lowercase', 'unique'])\r\n mapping.field('fromName', 'text', analyzer=analyzer_lang,\r\n fields={\r\n 'keyword': 'keyword',\r\n })\r\n mapping.field('fromEmail', 'text', analyzer=analyzer_email,\r\n fields={\r\n 'keyword': 'keyword',\r\n })\r\n mapping.field('toName', 'text', analyzer=analyzer_lang,\r\n fields={\r\n 'keyword': 'keyword',\r\n })\r\n mapping.field('toEmail', 'text', analyzer=analyzer_email,\r\n fields={\r\n 'keyword': 'keyword',\r\n })\r\n mapping.field('replyToName', 'text', analyzer=analyzer_lang,\r\n fields={\r\n 'keyword': 'keyword',\r\n })\r\n mapping.field('replyToEmail', 'text', analyzer=analyzer_email,\r\n fields={\r\n 'keyword': 'keyword',\r\n })\r\n mapping.field('subject', 'text', analyzer=analyzer_lang)\r\n mapping.field('date', 'date')\r\n mapping.field('body', 'text', analyzer=analyzer_lang)\r\n mapping.field('spam', 'boolean')\r\n mapping.field('hasAttachmet', 'boolean')\r\n mapping.field('attachmentNames', 'text', analyzer=analyzer_lang)", "def parse_mail(self, m):\n addrs = []\n if isinstance(m, email.message.Message):\n get_header = m.get\n else:\n get_header = m.get_header\n for h in ('to', 'from', 'cc', 'bcc'):\n v = get_header(h)\n if v:\n addrs.append(v)\n for addr in email.utils.getaddresses(addrs):\n name = addr[0].strip('; ')\n address = addr[1].lower().strip(';\\'\" ')\n if (address and address not in self.addresses):\n self.addresses[address] = name\n yield (name, 
address)", "def add_fields(self, field_names, allow_m2m=True):\n alias = self.get_initial_alias()\n opts = self.get_meta()\n\n try:\n for name in field_names:\n field, target, u2, joins, u3, u4 = self.setup_joins(\n name.split(LOOKUP_SEP), opts, alias, False, allow_m2m,\n True)\n final_alias = joins[-1]\n col = target.column\n cols = [] if hasattr(col, \"columns\") else [col]\n for col in cols:\n if len(joins) > 1:\n join = self.alias_map[final_alias]\n if col == join.rhs_join_col:\n self.unref_alias(final_alias)\n final_alias = join.lhs_alias\n col = join.lhs_join_col\n joins = joins[:-1]\n self.promote_joins(joins[1:])\n self.select.append((final_alias, col))\n self.select_fields.append(field)\n except MultiJoin:\n raise FieldError(\"Invalid field name: '%s'\" % name)\n except FieldError:\n if LOOKUP_SEP in name:\n # For lookups spanning over relationships, show the error\n # from the model on which the lookup failed.\n raise\n else:\n names = sorted(opts.get_all_field_names() + list(self.extra)\n + list(self.aggregate_select))\n raise FieldError(\"Cannot resolve keyword %r into field. \"\n \"Choices are: %s\" % (name, \", \".join(names)))\n self.remove_inherited_models()", "def get_fields(self):\n fields = []\n for items in self.order_items:\n fields += items.get_fields()\n \n fields = list(set(fields))\n \n field_order = ['recordId', 'orderId', 'itemId', 'collectionId']\n \n out_fields = field_order\n \n for f in fields:\n if f not in field_order:\n out_fields.append(f)\n \n return out_fields", "def _set_multi_field_values(self):\n for block_key, block in self.block_field_values.items():\n data = self._create_soap_object('ArrayOfArrayOfString')\n names = self._create_soap_object('ArrayOfString')\n names.string = block[0].keys()\n data.ArrayOfString.append(names)\n\n for item in block:\n row = self._create_soap_object('ArrayOfString')\n row.string = item.values()\n data.ArrayOfString.append(row)\n\n self.client.service.SetBlockFieldValues(blockName=block_key, blockFieldValues=data)", "def umm_fields(item):\n return scom.umm_fields(item)", "def parse_lines(self, lines):\n assert isinstance(lines, Iterable)\n\n for line in lines:\n name, values = self.parse_line(line)\n self.add(name, values)", "def _parse_fields(self, fields):\n\n parsed_fields = set()\n\n if fields is not None and isinstance(fields, (list, tuple)):\n if len(fields) > 0 and isinstance(fields[0], (list,tuple)):\n parsed_fields.update(fields)\n else:\n parsed_fields.update([(x, None) for x in fields])\n\n # Does not support field.attname.\n field_names = set((field.name, None) for field in self.model._meta.fields if not field.primary_key)\n non_model_fields = parsed_fields.difference(field_names)\n if non_model_fields:\n raise ValueError(\"The following fields do not exist in this\"\n \" model: {0}\".format(\", \".join(x[0] for x in non_model_fields)))\n else:\n parsed_fields.update(self._find_text_fields())\n\n return parsed_fields", "def make_fields(self):\n for name, prop in self.edit:\n instance_value = self.model.get(name)\n post_value = self.data[name] if (self.data and self.data.has_key(name)) else instance_value\n form_field_class = self.get_field_type(prop)\n form_field = form_field_class(model=self.model, property=prop, name=name, instance_value=instance_value, post_value=post_value)\n self.add(form_field)", "def __add_to_contents(self, field_name, field_value, field_type):\n if type(field_value) is list:\n for fv in field_value:\n self.__add_to_contents(field_name, fv, field_type)\n else:\n if len(field_value) > 
0: # ignore empty fields\n self.contents.append({'field_name': field_name,\n 'field_value': field_value,\n 'field_type': field_type})", "def fields(self, fields):\n\n self._fields = fields", "def add_data(self, in_data):\n old_data = {}\n for field in self.fields:\n # ToDo - might be a better way to determine the fieldname\n if field in in_data:\n if field in self.data:\n old_data = dict(self.data)\n self.data = {}\n\n self.data[field] = in_data[field]\n self.data['usUnits'] = in_data['usUnits']\n self.data['dateTime'] = in_data['dateTime']\n return old_data", "def updateAllLineFields(self):\n for format in self.values():\n format.updateLineFields()\n globalref.docRef.fileInfoFormat.updateLineFields()", "def additional_fields(self, additional_fields):\n\n self._additional_fields = additional_fields", "def add_items(self, items):\n for item in items:\n self.addItem(item)\n # end for item in items", "def _get_fields(self, album, extra):\n # Start with the configured base fields.\n if album:\n fields = self.config['albumfields'].as_str_seq()\n else:\n fields = self.config['itemfields'].as_str_seq()\n\n # Add the requested extra fields.\n if extra:\n fields += extra\n\n # Ensure we always have the `id` field for identification.\n fields.append('id')\n\n return set(fields)", "def parse_attachments(request):\n attachments = []\n for attachment in request.files.getlist('attachment'):\n attachments.append(Attachment(attachment.filename, attachment))\n return attachments", "def _extract(self):\r\n self._data = []\r\n for fname in self.files:\r\n meta = dict(filename=fname)\r\n\r\n # Perform the actual metadata extraction\r\n fname = os.path.splitext(self.filter_filename(fname))[0]\r\n values = fname.split(self.sep)\r\n\r\n # Handle the case where number of fields is less than the length\r\n # of the extracted values, ie cases where we only want to extract\r\n # a subset of available fields.\r\n if self.index:\r\n values = [val for i, val in enumerate(values) if i in self.index]\r\n\r\n meta.update(dict(zip(self.fields, values)))\r\n if self.split_by in self.fields:\r\n meta[self.split_by] = self._get_split_field_values(meta['filename'])\r\n self._data.append(meta)", "def _parse_item(self, item):\n result = {}\n for f in self._invoice_report_item_fields:\n val = get_value_by_relation_path(item, f)\n # when it's function - call it! 
usefull for Choices\n # (get_<field_name>_display)\n if callable(val):\n val = val()\n elif isinstance(val, datetime.datetime):\n val = val.strftime(self._invoice_report_datetime_format)\n elif isinstance(val, Money):\n val_currency = '{}_currency'.format(self._price_field)\n result[val_currency] = str(val.currency) \\\n if val.currency else self._invoice_report_empty_value\n val = val.amount\n result[f] = str(val) if val else self._invoice_report_empty_value\n\n return result", "def add_items(self, items: typing.Iterable[str]) -> None:\n for item in items:\n self.add_item(item)", "def too_many_custom_fields(upload_items: List[JSONDict]) -> JSONDict:\n altered = upload_items[0]\n altered[\"custom\"] = {str(x): str(x) for x in range(15)}\n return altered", "def add_field(self,\r\n fieldname,\r\n entrylist,\r\n check=False):\r\n if self.read_only:\r\n display.noteprint((alerts.ATTENTION,'CANNOT EXECUTE: READ ONLY'))\r\n return False\r\n\r\n for e_temp in entrylist:\r\n if str(e_temp) in self.default_dict['field'] and check:\r\n temp_query = alerts.CHANGE+BLANK+self.default_dict['field'][str(e_temp)]\\\r\n +BLANK+alerts.TO+BLANK+fieldname+QUESTIONMARK\r\n if input(temp_query) not in YESTERMS:\r\n self.default_dict['field'][str(e_temp)] = fieldname\r\n else:\r\n self.default_dict['field'][str(e_temp)] = fieldname\r\n self.dd_changed = True", "def _get_fields(self, xsession, freq, subject_id=None,\n visit_id=None, derived=False):\n fields = []\n for name, value in xsession.fields.items():\n fields.append(Field(\n name=name, value=value, derived=derived,\n frequency=freq, subject_id=subject_id,\n visit_id=visit_id, archive=self))\n return sorted(fields)", "def pull_fields(self, org):\n pass", "def extend(self, items):\n\t\tfor item in items:\n\t\t\tself.append(item)", "def list_process(field, item_list:List[str]):\n # if isinstance(item_list, list):\n if len(item_list) == 0:\n return {\n\n }\n saved_list = []\n\n for i in item_list:\n saved_list.append(f\"{i}\")\n return {\n field: \",\".join(saved_list)\n }", "def members(self, items):\n pass", "def validate_fields(cls, message_type: str, attachment_data: dict) -> None:", "def fields(self):\n self.update()\n return self.__fields", "def list_item_all_fields(self):\n return self.properties.get(\"ListItemAllFields\",\n ListItem(self.context, ResourcePath(\"ListItemAllFields\", self.resource_path)))", "def __format_additionals(additional):\n formated_additional = {\n 'contacts' : {},\n 'hosts' : {},\n 'registrars' : {}\n }\n\n if additional :\n for item in additional :\n if isinstance(item, objects.Contact) :\n formater.filterDisclosed(item)\n formated_additional['contacts'][item.roid] = item\n elif isinstance(item, objects.Host) :\n formated_additional['hosts'][item.name] = item\n elif isinstance(item, objects.Registrar) :\n formated_additional['registrars'][item.roid] = item\n\n return formated_additional", "def parseMsg(self):\n # These 4 elements are always present\n # \"ToUserName\"\n # \"FromUserName\"\n # \"CreateTime\"\n # \"MsgType\"\n\n # Following elements depends on MsgType\n # \"MsgId\"\n # \"Content\"\n # \"MediaId\"\n # \"PicUrl\"\n # \"Format\"\n # \"ThumbMediaId\"\n # \"Location_X\"\n # \"Location_Y\"\n # \"Scale\"\n # \"Label\"\n # \"Title\"\n # \"Description\"\n # \"Url\"\n # \"Event\"\n # \"EventKey\"\n # \"Ticket\"\n # \"Latitude\"\n # \"Longitude\"\n # \"Precision\"\n # \"Recognition\"\n\n def getField(req, key):\n if req.find(key) != None:\n return req.find(key).text\n\n\n msg = {}\n req = 
et.fromstring(self.request.body.decode(\"utf-8\"))\n\n # These 4 elements are always present\n msg[\"ToUserName\"] = getField(req, \"ToUserName\")\n msg[\"FromUserName\"] = getField(req, \"FromUserName\")\n msg[\"CreateTime\"] = getField(req, \"CreateTime\")\n msg[\"MsgType\"] = getField(req, \"MsgType\")\n\n # Following elements depends on MsgType\n msg[\"MsgId\"] = getField(req, \"MsgId\")\n msg[\"Content\"] = getField(req, \"Content\")\n msg[\"MediaId\"] = getField(req, \"MediaId\")\n msg[\"PicUrl\"] = getField(req, \"PicUrl\")\n msg[\"Format\"] = getField(req, \"Format\")\n msg[\"ThumbMediaId\"] = getField(req, \"ThumbMediaId\")\n msg[\"Location_X\"] = getField(req, \"Location_X\")\n msg[\"Location_Y\"] = getField(req, \"Location_Y\")\n msg[\"Scale\"] = getField(req, \"Scale\")\n msg[\"Label\"] = getField(req, \"Label\")\n msg[\"Title\"] = getField(req, \"Title\")\n msg[\"Description\"] = getField(req, \"Description\")\n msg[\"Url\"] = getField(req, \"Url\")\n msg[\"Event\"] = getField(req, \"Event\")\n msg[\"EventKey\"] = getField(req, \"EventKey\")\n msg[\"Ticket\"] = getField(req, \"Ticket\")\n msg[\"Latitude\"] = getField(req, \"Latitude\")\n msg[\"Longitude\"] = getField(req, \"Longitude\")\n msg[\"Precision\"] = getField(req, \"Precision\")\n msg[\"Recognition\"] = getField(req, \"Recognition\")\n return msg", "def add(self, items):\n if isinstance(items, list):\n self.items.extend(items)\n else:\n self.items.append(items)", "def add_items(self,items,form,prefix=''):\n for item in items:\n\n if isinstance(item,list) or isinstance(item,tuple):\n warnings.warn(\"warn_deprecated_inputitem\")\n try:\n item = compatInputItem(*item)\n except:\n pass\n \n if isinstance(item,dict):\n\n itemtype = item.get('itemtype',None)\n \n if itemtype == 'tab':\n self.add_tab(form,prefix=prefix,**item)\n\n elif itemtype == 'group':\n self.add_group(form,prefix=prefix,**item)\n\n else:\n self.add_input(form,prefix=prefix,**item)\n\n form.last = itemtype\n \n elif isinstance(item,QtGui.QWidget):\n # this allows including widgets which are not\n # input fields\n form.addWidget(item)\n form.last = None\n \n else:\n raise ValueError,\"Invalid input item (type %s). 
Expected a dict or a QWidget.\" % type(item)", "def items(self):\n for name in self.fields:\n yield name, getattr(self, name)", "def intialize_from_fields(self):\n raise NotImplementedError", "def add(self, item):\n pb = self._field.add()\n new_item = self._factory.make(item)\n for field in self._factory.PB_CLASS.DESCRIPTOR.fields_by_name.keys():\n if hasattr(new_item, field):\n if isinstance(\n self._factory.PB_CLASS.DESCRIPTOR.fields_by_name[\n field\n ].message_type,\n Descriptor,\n ):\n getattr(pb, field).CopyFrom(getattr(new_item, field))\n else:\n setattr(pb, field, getattr(new_item, field))\n new_item._pb = pb\n self._items.append(new_item)\n return new_item", "def process(self) -> None:\n self.parsed = email.message_from_bytes(self.rawmailcontent, policy=email.policy.EmailPolicy()) # type: email.message.EmailMessage\n\n self.subject = self.parsed[\"subject\"]\n\n if self.parsed[\"X-Jicket-Initial-ReplyID\"] is not None and self.parsed[\"X-Jicket-Initial-ReplyID\"] == self.parsed[\"In-Reply-To\"]:\n self.threadstarter = True\n elif self.config.ticketAddress in self.parsed[\"From\"]: # Take more heuristic approach\n self.threadstarter = True\n\n self.rawmailcontent = None # No need to store after processing\n\n self.get_text_bodies(self.parsed)\n self.textfrombodies()", "def _get_info_from_fields(self, fields):\n info = []\n for field in fields:\n if field is icemac.ab.calendar.interfaces.IEvent['persons']:\n value = self.persons\n else:\n schema_field = (\n icemac.addressbook.entities.get_bound_schema_field(\n self.context, None, field,\n default_attrib_fallback=False))\n try:\n value = schema_field.get(schema_field.context)\n except AttributeError:\n # Field defined on IEvent but not on IRecurringEvent, thus\n # it does not exist on the RecurredEvent.\n value = None\n if value is not None:\n value = six.text_type(value)\n if value:\n if field is icemac.ab.calendar.interfaces.IEvent['text']:\n info.extend(value.split('\\n'))\n else:\n info.append(value)\n return info", "def _set_field_values(self):\n data = self._create_soap_object('ArrayOfArrayOfString')\n\n arr1 = self._create_soap_object('ArrayOfString')\n arr1.string = self.field_values.keys()\n\n arr2 = self._create_soap_object('ArrayOfString')\n arr2.string = self.field_values.values()\n\n data.ArrayOfString.append(arr1)\n data.ArrayOfString.append(arr2)\n\n self.client.service.SetFieldValues(fieldValues=data)", "def make_gaf_list( self, pageslips_list ):\n new_item_list = []\n pageslip_count = 0\n for item in pageslips_list:\n try:\n parser = utility_code.Parser()\n record_number = utility_code.parseRecordNumber(item)\n book_barcode = parser.parse_bookbarcode( item )\n las_delivery_stop = utility_code.parseJosiahPickupAtCode(item)\n las_customer_code = parser.parse_josiah_location_code( item )\n patron_name = utility_code.parsePatronName(item)\n patron_barcode = utility_code.parsePatronBarcode(item)\n title = parser.parse_title( item )\n las_date = utility_code.prepareLasDate()\n note = parser.parse_note( item )\n full_line = '''\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\"''' % ( record_number, book_barcode, las_delivery_stop, las_customer_code, patron_name, patron_barcode, title, las_date, note )\n new_item_list.append( full_line )\n pageslip_count = pageslip_count + 1\n if pageslip_count % 10 == 0:\n log.debug( '`%s` pageslips processed so far...' 
% pageslip_count )\n except Exception, e:\n subject = 'annex process pageslips problem'\n message = 'iterating through item_list; problem with item \"%s\"; exception is: %s' % ( item, unicode(repr(e)) )\n logger.error( message )\n m = Mailer( subject, message )\n m.send_email()\n log.info( '`%s` items parsed' % pageslip_count )\n log.debug( 'new_item_list, ```%s```' % pprint.pformat(new_item_list) )\n return new_item_list", "def get_items(self):\n\n items = super(FilingBusinessCaseByline, self).get_items()\n index = [attr.get('class') for attr in items].index('email')\n\n filing_number = {\n 'class': 'filing_no',\n 'label': _('label_filing_no', default='Filing Number'),\n 'content': self.get_filing_no(),\n 'replace': False}\n\n items.insert(index, filing_number)\n return items", "def set_list_fields(self,fields):\n # import pdb;pdb.set_trace()\n \n if not fields:\n fields = []\n \n list_fields_temp = [x for x in fields] # make a copy\n \n if not isinstance(fields,list):\n fields = [fields]\n \n for field in fields:\n if isinstance(field,str):\n # assume to be a field name\n field = {'name':field,'label':'{}'.format(self.make_label(field)),'class':'','search':True,'type':\"TEXT\",'list':True}\n if not isinstance(field,dict):\n continue # it must be a dict\n for x in range(len(list_fields_temp)-1,-1,-1): # turkey shoot loop\n default_field_dict = {'label':'','class':'','search':True}\n if not isinstance(list_fields_temp[x],dict) or 'name' not in list_fields_temp[x]:\n # bad element got into fields somehow...\n del list_fields_temp[x]\n continue\n if list_fields_temp[x].get('name',False) == field.get('name',None):\n default_field_dict = {'label':'','class':'','search':True,'type':'TEXT','default':'','list':True}\n for k in default_field_dict.keys():\n if k in field:\n default_field_dict.update({k:field[k]})\n elif k == 'type':\n field_type = \"TEXT\"\n try:\n field_type = self.table.get_column_type(field['name'])\n except KeyError:\n # the field name may be defined in the query \n pass\n default_field_dict.update({k:field_type})\n \n break\n \n list_fields_temp[x].update(default_field_dict)\n list_fields_temp[x]['label'] = list_fields_temp[x]['label'] if list_fields_temp[x]['label'] else self.make_label(list_fields_temp[x]['name'])\n if list_fields_temp[x]['search']:\n self.has_search_fields = True\n \n fields = list_fields_temp", "def _get_repr_body_fields(self) -> List[Tuple[str, Union[str, int, List[str]]]]:\n # Set up fields\n fields = {\n \"Uploader Name\": self.uploader_name,\n \"Metric\": self.task_evaluation_measure,\n \"Run ID\": self.run_id,\n \"Task ID\": self.task_id,\n \"Task Type\": self.task_type,\n \"Task URL\": openml.tasks.OpenMLTask.url_for_id(self.task_id),\n \"Flow ID\": self.flow_id,\n \"Flow Name\": self.flow_name,\n \"Flow URL\": openml.flows.OpenMLFlow.url_for_id(self.flow_id),\n \"Setup ID\": self.setup_id,\n \"Setup String\": self.setup_string,\n \"Dataset ID\": self.dataset_id,\n \"Dataset URL\": openml.datasets.OpenMLDataset.url_for_id(self.dataset_id),\n }\n\n # determines the order of the initial fields in which the information will be printed\n order = [\"Uploader Name\", \"Uploader Profile\", \"Metric\", \"Result\"]\n\n if self.uploader is not None:\n fields[\"Uploader Profile\"] = \"{}/u/{}\".format(\n openml.config.get_server_base_url(), self.uploader\n )\n if self.run_id is not None:\n fields[\"Run URL\"] = self.openml_url\n if self.evaluations is not None and self.task_evaluation_measure in self.evaluations:\n fields[\"Result\"] = 
self.evaluations[self.task_evaluation_measure]\n elif self.fold_evaluations is not None:\n # -- Add locally computed summary values if possible\n if \"predictive_accuracy\" in self.fold_evaluations:\n # OpenMLClassificationTask; OpenMLLearningCurveTask\n # default: predictive_accuracy\n result_field = \"Local Result - Accuracy (+- STD)\"\n fields[result_field] = self._evaluation_summary(\"predictive_accuracy\")\n order.append(result_field)\n elif \"mean_absolute_error\" in self.fold_evaluations:\n # OpenMLRegressionTask\n # default: mean_absolute_error\n result_field = \"Local Result - MAE (+- STD)\"\n fields[result_field] = self._evaluation_summary(\"mean_absolute_error\")\n order.append(result_field)\n\n if \"usercpu_time_millis\" in self.fold_evaluations:\n # Runtime should be available for most tasks types\n rt_field = \"Local Runtime - ms (+- STD)\"\n fields[rt_field] = self._evaluation_summary(\"usercpu_time_millis\")\n order.append(rt_field)\n\n # determines the remaining order\n order += [\n \"Run ID\",\n \"Run URL\",\n \"Task ID\",\n \"Task Type\",\n \"Task URL\",\n \"Flow ID\",\n \"Flow Name\",\n \"Flow URL\",\n \"Setup ID\",\n \"Setup String\",\n \"Dataset ID\",\n \"Dataset URL\",\n ]\n return [(key, fields[key]) for key in order if key in fields]", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .entity import Entity\n from .mail_search_folder import MailSearchFolder\n from .message import Message\n from .message_rule import MessageRule\n from .multi_value_legacy_extended_property import MultiValueLegacyExtendedProperty\n from .single_value_legacy_extended_property import SingleValueLegacyExtendedProperty\n\n from .entity import Entity\n from .mail_search_folder import MailSearchFolder\n from .message import Message\n from .message_rule import MessageRule\n from .multi_value_legacy_extended_property import MultiValueLegacyExtendedProperty\n from .single_value_legacy_extended_property import SingleValueLegacyExtendedProperty\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"childFolderCount\": lambda n : setattr(self, 'child_folder_count', n.get_int_value()),\n \"childFolders\": lambda n : setattr(self, 'child_folders', n.get_collection_of_object_values(MailFolder)),\n \"displayName\": lambda n : setattr(self, 'display_name', n.get_str_value()),\n \"isHidden\": lambda n : setattr(self, 'is_hidden', n.get_bool_value()),\n \"messageRules\": lambda n : setattr(self, 'message_rules', n.get_collection_of_object_values(MessageRule)),\n \"messages\": lambda n : setattr(self, 'messages', n.get_collection_of_object_values(Message)),\n \"multiValueExtendedProperties\": lambda n : setattr(self, 'multi_value_extended_properties', n.get_collection_of_object_values(MultiValueLegacyExtendedProperty)),\n \"parentFolderId\": lambda n : setattr(self, 'parent_folder_id', n.get_str_value()),\n \"singleValueExtendedProperties\": lambda n : setattr(self, 'single_value_extended_properties', n.get_collection_of_object_values(SingleValueLegacyExtendedProperty)),\n \"totalItemCount\": lambda n : setattr(self, 'total_item_count', n.get_int_value()),\n \"unreadItemCount\": lambda n : setattr(self, 'unread_item_count', n.get_int_value()),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def check_attachment_fields(self):\n for field_name, field in self.fields.items():\n if isinstance(field, serializers.ListSerializer):\n if hasattr(field.child, \"field\"):\n for child_name, child in field.child.field.items():\n 
self.handle_attachment_field(child, child_name)\n else:\n self.handle_attachment_field(field, field_name)", "def add_items(todofile, items):\n if(items is not None and len(items) > 0):\n for item in items:\n todofile.write_todo(parse_item(item))", "def add(line, *fieldnames):\n return sum(map(safe, map(lambda fieldname: line[fieldname], fieldnames)))", "def wp2fields(xml, wp_custpost=False):\r\n\r\n items = get_items(xml)\r\n for item in items:\r\n\r\n if item.find('status').string == \"publish\":\r\n\r\n try:\r\n # Use HTMLParser due to issues with BeautifulSoup 3\r\n title = HTMLParser().unescape(item.title.contents[0])\r\n except IndexError:\r\n title = 'No title [%s]' % item.find('post_name').string\r\n logger.warning('Post \"%s\" is lacking a proper title' % title)\r\n\r\n filename = item.find('post_name').string\r\n post_id = item.find('post_id').string\r\n filename = get_filename(filename, post_id)\r\n\r\n content = item.find('encoded').string\r\n raw_date = item.find('post_date').string\r\n date_object = time.strptime(raw_date, \"%Y-%m-%d %H:%M:%S\")\r\n date = time.strftime(\"%Y-%m-%d %H:%M\", date_object)\r\n author = item.find('creator').string\r\n\r\n categories = [cat.string for cat in item.findAll('category', {'domain' : 'category'})]\r\n # caturl = [cat['nicename'] for cat in item.find(domain='category')]\r\n\r\n tags = [tag.string for tag in item.findAll('category', {'domain' : 'post_tag'})]\r\n\r\n kind = 'article'\r\n post_type = item.find('post_type').string\r\n if post_type == 'page':\r\n kind = 'page'\r\n elif wp_custpost:\r\n if post_type == 'post':\r\n pass\r\n # Old behaviour was to name everything not a page as an article.\r\n # Theoretically all attachments have status == inherit so\r\n # no attachments should be here. But this statement is to\r\n # maintain existing behaviour in case that doesn't hold true.\r\n elif post_type == 'attachment':\r\n pass\r\n else:\r\n kind = post_type\r\n yield (title, content, filename, date, author, categories, tags,\r\n kind, \"wp-html\")", "def process(mlist, msg, msgdata):\n # Digests and Mailman-craft messages should not get additional headers.\n if msgdata.get('isdigest') or msgdata.get('nodecorate'):\n return\n d = {}\n member = msgdata.get('member')\n if member is not None:\n # Calculate the extra personalization dictionary.\n recipient = msgdata.get('recipient', member.address.original_email)\n d['member'] = formataddr(\n (member.subscriber.display_name, member.subscriber.email))\n d['user_email'] = recipient\n d['user_delivered_to'] = member.address.original_email\n d['user_language'] = member.preferred_language.description\n d['user_name'] = member.display_name\n # For backward compatibility.\n d['user_address'] = recipient\n # Calculate the archiver permalink substitution variables. This provides\n # the $<archive-name>_url placeholder for every enabled archiver.\n for archiver in IListArchiverSet(mlist).archivers:\n if archiver.is_enabled:\n # Get the permalink of the message from the archiver. 
Watch out\n # for exceptions in the archiver plugin.\n try:\n archive_url = archiver.system_archiver.permalink(mlist, msg)\n except Exception:\n alog.exception('Exception in \"{}\" archiver'.format(\n archiver.system_archiver.name))\n archive_url = None\n if archive_url is not None:\n placeholder = '{}_url'.format(archiver.system_archiver.name)\n d[placeholder] = archive_url\n # These strings are descriptive for the log file and shouldn't be i18n'd\n d.update(msgdata.get('decoration-data', {}))\n header = decorate('list:member:regular:header', mlist, d)\n footer = decorate('list:member:regular:footer', mlist, d)\n # Escape hatch if both the footer and header are empty or None.\n if len(header) == 0 and len(footer) == 0:\n return\n # Be MIME smart here. We only attach the header and footer by\n # concatenation when the message is a non-multipart of type text/plain.\n # Otherwise, if it is not a multipart, we make it a multipart, and then we\n # add the header and footer as text/plain parts.\n #\n # BJG: In addition, only add the footer if the message's character set\n # matches the charset of the list's preferred language. This is a\n # suboptimal solution, and should be solved by allowing a list to have\n # multiple headers/footers, for each language the list supports.\n #\n # Also, if the list's preferred charset is us-ascii, we can always\n # safely add the header/footer to a plain text message since all\n # charsets Mailman supports are strict supersets of us-ascii --\n # no, UTF-16 emails are not supported yet.\n #\n # TK: Message with 'charset=' cause trouble. So, instead of\n # mgs.get_content_charset('us-ascii') ...\n mcset = msg.get_content_charset() or 'us-ascii'\n lcset = mlist.preferred_language.charset\n msgtype = msg.get_content_type()\n # BAW: If the charsets don't match, should we add the header and footer by\n # MIME multipart chroming the message?\n wrap = True\n if not msg.is_multipart() and msgtype == 'text/plain':\n # Save the RFC-3676 format parameters.\n format_param = msg.get_param('format')\n delsp = msg.get_param('delsp')\n # Save 'Content-Transfer-Encoding' header in case decoration fails.\n cte = msg.get('content-transfer-encoding')\n # header/footer is now in unicode.\n try:\n oldpayload = msg.get_payload(decode=True).decode(mcset)\n del msg['content-transfer-encoding']\n frontsep = endsep = ''\n if len(header) > 0 and not header.endswith('\\n'):\n frontsep = '\\n'\n if len(footer) > 0 and not oldpayload.endswith('\\n'):\n endsep = '\\n'\n payload = header + frontsep + oldpayload + endsep + footer\n # When setting the payload for the message, try various charset\n # encodings until one does not produce a UnicodeError. We'll try\n # charsets in this order: the list's charset, the message's\n # charset, then utf-8. 
It's okay if some of these are duplicates.\n for cset in (lcset, mcset, 'utf-8'):\n try:\n msg.set_payload(payload.encode(cset), cset)\n except UnicodeError:\n pass\n else:\n if format_param:\n msg.set_param('format', format_param)\n if delsp:\n msg.set_param('delsp', delsp)\n wrap = False\n break\n except (LookupError, UnicodeError):\n if cte:\n # Restore the original c-t-e.\n del msg['content-transfer-encoding']\n msg['Content-Transfer-Encoding'] = cte\n elif msg.get_content_type() == 'multipart/mixed':\n # The next easiest thing to do is just prepend the header and append\n # the footer as additional subparts\n payload = msg.get_payload()\n if not isinstance(payload, list):\n payload = [payload]\n if len(footer) > 0:\n mimeftr = MIMEText(footer.encode(lcset), 'plain', lcset)\n mimeftr['Content-Disposition'] = 'inline'\n payload.append(mimeftr)\n if len(header) > 0:\n mimehdr = MIMEText(header.encode(lcset), 'plain', lcset)\n mimehdr['Content-Disposition'] = 'inline'\n payload.insert(0, mimehdr)\n msg.set_payload(payload)\n wrap = False\n # If we couldn't add the header or footer in a less intrusive way, we can\n # at least do it by MIME encapsulation. We want to keep as much of the\n # outer chrome as possible.\n if not wrap:\n return\n # Because of the way Message objects are passed around to process(), we\n # need to play tricks with the outer message -- i.e. the outer one must\n # remain the same instance. So we're going to create a clone of the outer\n # message, with all the header chrome intact, then copy the payload to it.\n # This will give us a clone of the original message, and it will form the\n # basis of the interior, wrapped Message.\n inner = Message()\n # Which headers to copy? Let's just do the Content-* headers\n for h, v in msg.items():\n if h.lower().startswith('content-'):\n inner[h] = v\n inner.set_payload(msg.get_payload())\n # For completeness\n inner.set_unixfrom(msg.get_unixfrom())\n inner.preamble = msg.preamble\n inner.epilogue = msg.epilogue\n # Don't copy get_charset, as this might be None, even if\n # get_content_charset isn't. However, do make sure there is a default\n # content-type, even if the original message was not MIME.\n inner.set_default_type(msg.get_default_type())\n # BAW: HACK ALERT.\n if hasattr(msg, '__version__'):\n inner.__version__ = msg.__version__\n # Now, play games with the outer message to make it contain three\n # subparts: the header (if any), the wrapped message, and the footer (if\n # any).\n payload = [inner]\n if len(header) > 0:\n mimehdr = MIMEText(header.encode(lcset), 'plain', lcset)\n mimehdr['Content-Disposition'] = 'inline'\n payload.insert(0, mimehdr)\n if len(footer) > 0:\n mimeftr = MIMEText(footer.encode(lcset), 'plain', lcset)\n mimeftr['Content-Disposition'] = 'inline'\n payload.append(mimeftr)\n msg.set_payload(payload)\n del msg['content-type']\n del msg['content-transfer-encoding']\n del msg['content-disposition']\n msg['Content-Type'] = 'multipart/mixed'", "def _replace_fields(self):\n for name, value in self._cleaned_data.items():\n setattr(self, name, value)", "def _set_data(self, new_data):\n for name, field in self._get_fields().items():\n if name in new_data:\n try:\n setattr(self, f\"__{name}\", field.from_raw(new_data[name]))\n except (fields.ValidationError, ValueError):\n # should at least log validation and value errors\n # this can happen in case of e.g. 
fields type change\n pass", "def populate_form(self, **kwargs):\n for name, value in kwargs.items():\n self.populate_field(name, value)", "def fields_in_list(self, fields_in_list):\n\n self._fields_in_list = fields_in_list", "def fields_in_list(self, fields_in_list):\n\n self._fields_in_list = fields_in_list", "def get_fields(self, request, obj=None):\n if obj:\n return self.fields\n return self.add_fields", "def iter_fields(self):\n # there are two ways a mergefield can be represented: the simple way with fldSimple and the more\n # complex way with instrText and fldChar.\n\n simple_fields = ((field.attrib[namespaced('instr')], field) for field in self._element.xpath('.//w:fldSimple'))\n complex_fields = ((field.text, field) for field in self._element.xpath('.//w:instrText'))\n\n for instr, field in itertools.chain(simple_fields, complex_fields):\n\n m = r.match(instr)\n if not m and self.strict:\n raise ValueError(\"Could not determine name of merge field with instr text '{}'\".format(instr))\n elif not m:\n logger.warning(\"Could not determine name of merge field with instr text '{}'. Skipping\".format(instr))\n continue\n\n yield m.group(1), field", "def _decode(self, parts: typing.List[int]) -> typing.Dict:\n info = {field.name: field.decode(parts[i]) for i, field in enumerate(self.fields)}\n return info" ]
[ "0.63991386", "0.6073475", "0.5998139", "0.5921391", "0.58640885", "0.5802949", "0.57639647", "0.5761297", "0.5378668", "0.5297593", "0.52813184", "0.52709", "0.52335626", "0.5191542", "0.5156435", "0.5155164", "0.5128716", "0.50924057", "0.5078077", "0.5077574", "0.5053269", "0.50265515", "0.5012257", "0.50003225", "0.49987805", "0.49942657", "0.49912974", "0.4989622", "0.49871", "0.4937105", "0.49331224", "0.4924805", "0.49169502", "0.4901293", "0.4885752", "0.48808336", "0.48781505", "0.48767397", "0.48742864", "0.487343", "0.48732978", "0.4858122", "0.4851701", "0.48456925", "0.48436913", "0.48324195", "0.48277983", "0.4825545", "0.48172235", "0.48164892", "0.48082644", "0.48058307", "0.48016286", "0.47923914", "0.47908235", "0.4787203", "0.4780578", "0.47559386", "0.47548687", "0.475437", "0.47455645", "0.47449166", "0.4738216", "0.47306365", "0.47233322", "0.47225443", "0.47193217", "0.47165638", "0.47093946", "0.47093648", "0.47040746", "0.4703404", "0.46979737", "0.46971282", "0.46873677", "0.468601", "0.46803486", "0.4671909", "0.4666942", "0.4664394", "0.46594405", "0.4656252", "0.4651184", "0.46487632", "0.46446118", "0.46359366", "0.4616041", "0.4609696", "0.4602299", "0.45970833", "0.45928422", "0.4590802", "0.45897856", "0.45823416", "0.45758194", "0.4572119", "0.4572119", "0.45688957", "0.45673403", "0.45642942" ]
0.6065927
2
initialization method that stores the arguments of the class Rectangle.
def __init__(self, width, height, x=0, y=0, id=None):
    if type(width) is not int:
        raise TypeError("width must be an integer")
    if width <= 0:
        raise ValueError("width must be > 0")
    if type(height) is not int:
        raise TypeError("height must be an integer")
    if height <= 0:
        raise ValueError("height must be > 0")
    if type(x) is not int:
        raise TypeError("x must be an integer")
    if x < 0:
        raise ValueError("x must be >= 0")
    if type(y) is not int:
        raise TypeError("y must be an integer")
    if y < 0:
        raise ValueError("y must be >= 0")
    self.width = width
    self.height = height
    self.x = x
    self.y = y
    super().__init__(id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, height, width):\n\n\t\t# _width and _height are internal (private) Rectangle Instance's attributes. This is something\n\t\t# We keep to ourselves to make sure the User can't just update these attrs randomly and also\n\t\t# so that the code has backward compatibility.\n\t\tself._width = None\n\t\tself._height = None\n\n\t\t# Lets now use the SETTER Method the width and height of the newly initialised Rectangle Class\n\t\tself.width = width\n\t\tself.height = height", "def test_rectangle_full_init(self):\n r2 = Rectangle(10, 2, 1, 3, 12)\n self.assertEqual(r2.width, 10)\n self.assertEqual(r2.height, 2)\n self.assertEqual(r2.x, 1)\n self.assertEqual(r2.y, 3)\n self.assertEqual(r2.id, 12)", "def test_rectangle_partial_init(self):\n r1 = Rectangle(5, 4, 0, 0)\n self.assertEqual(r1.width, 5)\n self.assertEqual(r1.height, 4)\n self.assertEqual(r1.x, 0)\n self.assertEqual(r1.y, 0)\n self.assertEqual(r1.id, 2)", "def __init__(self, *args):\n _snap.TFltRect_swiginit(self, _snap.new_TFltRect(*args))", "def __init__(self, width, height):\n super(GRect, self).__init__()\n self.__init__(0, 0, width, height)", "def __init__(self, *args):\n if len(args) == 4:\n # left, top, width, height\n self.left, self.top, self.width, self.height = args\n elif len(args) == 2:\n # (left, top), (width, height)\n self.left, self.top = args[0]\n self.width, self.height = args[1]\n elif len(args) == 1:\n self.left = args[0].left\n self.top = args[0].top\n self.width = args[0].width\n self.height = args[0].height\n else:\n raise ValueError(\"Invalid arguments passed to Rect initializer\")", "def setUp(self):\n self.obj = Rectangle(1, 1)", "def __init___(self, x, y, width, height):\n super(GRect, self).__init__()\n frameWidth = width\n frameHeight = height\n setLocation(x, y)", "def update(self, *args, **kwargs):\n if args and len(args) > 0:\n if len(args) == 1:\n Base.__init__(self, args[0])\n elif len(args) == 2:\n Base.__init__(self, args[0])\n self.__width = args[1]\n elif len(args) == 3:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n elif len(args) == 4:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n elif len(args) == 5:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n self.__y = args[4]\n else:\n for key, value in kwargs.items():\n if key == 'width':\n Rectangle.width.__set__(self, value)\n elif key == 'height':\n Rectangle.height.__set__(self, value)\n elif key == 'x':\n Rectangle.x.__set__(self, value)\n elif key == 'y':\n Rectangle.y.__set__(self, value)\n elif key == 'id':\n Base.__init__(self, value)", "def transforminitargs(cls, *args, rectangle=None, **kwargs):\n rectanglekwargs = {}\n if rectangle is not None:\n rectanglekwargs = {\n **{\n field: getattr(rectangle, field)\n for field in dataclassy.fields(type(rectangle))\n }\n }\n return super().transforminitargs(\n *args,\n **rectanglekwargs,\n **kwargs,\n )", "def __init__(self, y: int):\n self.y = y\n self.x = 0\n self.first_x = 0\n self.second_x = Base.Width\n self.rect = pygame.Rect(self.x, self.y, Base.Width, Base.Height)", "def __init__(self, initX, initY):\n self.x = initX\n self.y = initY", "def __init__(self, initX, initY):\n self.x = initX\n self.y = initY", "def test_08_four_args(self):\n r = Rectangle(7, 2, 10, 20)\n self.assertEqual(r.id, 1)\n self.assertEqual(r.width, 7)\n self.assertEqual(r.height, 2)\n self.assertEqual(r.x, 10)\n self.assertEqual(r.y, 20)", "def 
__init__(self, width, height):\n self.integer_validator(\"width\", width)\n self.integer_validator(\"height\", height)\n self.__width = width\n self.__height = height", "def test_06_two_args(self):\n r = Rectangle(7, 2)\n self.assertEqual(r.id, 1)\n self.assertEqual(r.width, 7)\n self.assertEqual(r.height, 2)\n self.assertEqual(r.x, 0)\n self.assertEqual(r.y, 0)", "def __init__(self, width, height):\n self.integer_validator(\"width\", width)\n self.__width = width\n self.integer_validator(\"height\", height)\n self.__height = height", "def __init__(self):\r\n self.radius = BALL_RADIUS\r\n self.center_x = BALL_START_X\r\n self.center_y = BALL_START_Y\r\n self.velocity = BALL_SPEED\r\n self.angle = - math.pi / 2\r\n self.rectangle = pygame.Rect(self.center_x - self.radius, self.center_y - self.radius, 2 * self.radius, 2 * self.radius)\r\n self.color = \"white\"\r\n self.save_pos = (self.center_x, self.center_y)", "def __init__(self, left, top, width, height):\n self.left = left\n self.top = top\n self.width = width\n self.height = height", "def __init__(self, ll, ur):\n log.debug(\"Rect from ll {}, ur {}\".format(repr(ll), repr(ur)))\n # Ensure ll really is lower left and ur really is upper right\n self.ll = Point(min(ll.x, ur.x), min(ll.y, ur.y))\n log.debug(\"ll will be {}\".format(self.ll))\n self.ur = Point(max(ll.x, ur.x), max(ll.y, ur.y))\n log.debug(\"ur will be {}\".format(self.ur))\n log.debug(\"Created rect {}\".format(repr(self)))", "def __init__(self, x, y):\n # assigning the initial position\n self.x = x\n self.y = y", "def __init__(self, upper_left, lower_right):\n self.upper_left = upper_left\n self.lower_right = lower_right", "def __init__(self):\n\n self.width = 10\n self.height = 10\n self.new_game()", "def test_07_three_args(self):\n r = Rectangle(7, 2, 10)\n self.assertEqual(r.id, 1)\n self.assertEqual(r.width, 7)\n self.assertEqual(r.height, 2)\n self.assertEqual(r.x, 10)\n self.assertEqual(r.y, 0)", "def __init__(self, surface, rect):\n self.surface = surface\n self.rect = rect", "def __init__(self, surface, rect):\n self.surface = surface\n self.rect = rect", "def test_09_five_args(self):\n r = Rectangle(7, 2, 10, 20, 5)\n self.assertEqual(r.id, 5)\n self.assertEqual(r.width, 7)\n self.assertEqual(r.height, 2)\n self.assertEqual(r.x, 10)\n self.assertEqual(r.y, 20)", "def __init__(self, width, height):\n self.width = width\n self.height = height\n self.pos_x = START_X\n self.pos_y = START_Y\n self.col_d = False\n self.col_l = False\n self.col_r = False", "def __init__(self, r,g,b):\n self.__r = r; self.__g = g; self.__b = b", "def __init__(self, rect, image):\n self.rect = rect\n self.image = image\n self.state = self.S_ACTIVE", "def __init__(self, x, y):\n self._x, self._y = x, y", "def __init__(self, x, y):", "def addRect(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\r\n pass", "def __init__(self, width, height, x, y):\n self.w = width\n self.h = height\n self.x = x\n self.y = y", "def initialize(self):\n\n super(RectTab,self).initialize()\n # special tkinter variables that will be changed with the scales\n self.width = tk.IntVar()\n self.height = tk.IntVar()\n\n # make width scale\n self.widthScale = tk.Scale(self, from_=1, to=5, orient=tk.HORIZONTAL,\n label='Width', resolution=1, variable=self.width,\n command=self.updateSize)\n self.widthScale.grid(column=2, row=6, columnspan=1, sticky='W' + 'E')\n self.widthScale.set(2)\n\n # make height scale\n self.heightScale = tk.Scale(self, from_=1, to=5, orient=tk.HORIZONTAL,\n 
label='Height', resolution=1, variable=self.height,\n command=self.updateSize)\n self.heightScale.grid(column=2, row=7, columnspan=1, sticky='W' + 'E')\n self.heightScale.set(2)", "def __init__(self, x, y):\n self.x, self.y = x, y", "def __init__(self,x=0,y=0):\n self.x = x\n self.y = y\n pass", "def __init__(self, width, height, x=0, y=0, id=None):\n __dict_args = {\"width\": width, \"height\": height, \"x\": x, \"y\": y}\n self.input_validator(__dict_args)\n self.__width = width\n self.__height = height\n self.__x = x\n self.__y = y\n super().__init__(id)", "def __init__(self, *args):\n _ShapeBuild.ShapeBuild_ReShape_swiginit(self,_ShapeBuild.new_ShapeBuild_ReShape(*args))", "def __init__(self, dimensions, origin, friend, foe):\n self.origin = Point(origin[0], origin[1])\n self.rec = MyRectangle(self.origin, dimensions)\n\n friend = Point(friend[0], friend[1])\n foe = Point(foe[0], foe[1])\n withinParam = self.rec.isPointInside(friend)\n withinParam &= self.rec.isPointInside(foe)\n if not withinParam: \n raise Exception('Point must be inside the rectangle') \n \n self.friend = friend\n self.foe = foe\n self.dim = tuple(dimensions)\n self.mirrors = {}\n return", "def __init__(self, x = 0, y = 0):\n self.x = x\n self.y = y", "def __init__(self,x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x1, y1):\n self.x = x1\n self.y = y1", "def __init__(self,x,y):\n self.x = x\n self.y = y", "def __init__(self):\n self.width = 0\n self.height = 0\n self.my_color = 0", "def __init__(self, x, y):\n\t\t\n\t\tself.x, self.y = x, y", "def __init__(self, width, height, x=0, y=0, id=None):\n super().__init__(id)\n self.width = width\n self.height = height\n self.x = x\n self.y = y", "def __init__(self, width, height, x=0, y=0, id=None):\n super().__init__(id)\n self.width = width\n self.height = height\n self.x = x\n self.y = y", "def __init__(self, x, y):\r\n self.x=x\r\n self.y=y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self, x, y):\n self.x = x\n self.y = y", "def __init__(self):\n\n self.X = None\n self.y = None", "def __init__(self):\n\n self.X = None\n self.y = None", "def __init__(self, width, height, x=0, y=0, id=None):\n self.width = width\n self.height = height\n self.x = x\n self.y = y\n super().__init__(id)", "def __init__(self, width, height, x=0, y=0, id=None):\n self.width = width\n self.height = height\n self.x = x\n self.y = y\n super().__init__(id)", "def __init__(self, width, height, x=0, y=0, id=None):\n self.width = width\n self.height = height\n self.x = x\n self.y = y\n super().__init__(id)", "def __init__(self, area_extents: np.ndarray):\n self.area_extents = area_extents", "def __init__(self, min_x, min_y, max_x=0, max_y=0,\n 
width=0, height=0):\n self.min_x = min_x\n self.min_y = min_y\n if width > 0:\n self.max_x = min_x + width\n else:\n self.max_x = max_x\n if height > 0:\n self.max_y = min_y + height\n else:\n self.max_y = max_y", "def __init__(self, x, y):\n\t\tself.x = x\n\t\tself.y = y", "def __init__(self, x=0, y=0):\r\n self.x = x\r\n self.y = y", "def __init__(self, x=0, y=0):\r\n self.x = x\r\n self.y = y", "def __init__(self, x=0, y=0):\r\n self.x = x\r\n self.y = y", "def __init__(self, x=0, y=0):\r\n self.x = x\r\n self.y = y", "def __init__(self, x=0, y=0):\r\n self.x = x\r\n self.y = y", "def __init__(self):\n self.rows = None\n self.columns = None\n self.squares = None\n # max is useful as a way to track range for iteration, and also as a way\n # to track the maximum number in any spot.\n self.max = 0", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def __init__(self, x=0, y=0):\n self.x = x\n self.y = y", "def test_init_type(self):\n self.assertIsInstance(Rectangle(1, 1), Base)\n self.assertIsInstance(Rectangle(1, 1), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=None), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, 0, 0), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, 0, 0, 0), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=0), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=0.0), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=\"0\"), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=(0,)), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=[0]), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id={0}), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id={0: 0}), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=True), Rectangle)\n self.assertIsInstance(Rectangle(1, 1, id=type), Rectangle)", "def __init__(self, width, height, x=0, y=0, id=None):\n\n super().__init__(id)\n self.width = width\n self.height = height\n self.x = x\n self.y = y", "def __init__(self, **kwargs):\n self.__dict__.update(kwargs)\n self._calc_coords()", "def __init__(self, x_0, y_0, initX, initY,h=5):\n self.x_0=x_0\n self.y_0=y_0\n self.x_init=initX\n self.y_init=initY\n self.step=h", "def __init__(self, x, y):\n self._x = x\n self._y = y", "def __init__(self, x=0, y=0):\n self._x = x\n self._y = y", "def __init__(self, x, y, width, height, color):\n self._x = x\n self._y = y\n self._width = width\n self._height = height\n self._color = color", "def __init__(self, dimensions, origin, friend, foe):\n self.origin = Point(origin[0], origin[1])\n self.rec = Rectangle(self.origin, dimensions)\n\n friend = Point(friend[0], friend[1])\n foe = Point(foe[0], foe[1])\n withinParam = self.rec.isPointInside(friend)\n withinParam &= self.rec.isPointInside(foe)\n if not withinParam: \n raise Exception('Point must be inside the rectangle') \n \n self.friend = friend\n self.foe = foe\n self.dim = tuple(dimensions)\n self.mirrors = {}\n return", "def __init__(self, image, rect, x, y, xv, yv):\n self.image = image\n self.rect = rect\n self.rect.topleft 
= [x, y]\n self.x = x\n self.y = y\n self.xv = xv\n self.yv = yv", "def __init__(self, x, y, width, height):\n self.x1 = x\n self.y1 = y\n self.x2 = x + width\n self.y2 = y + height" ]
[ "0.7717153", "0.70783764", "0.7057518", "0.6912608", "0.6878885", "0.6844327", "0.6746747", "0.6710487", "0.66281", "0.6565641", "0.6465655", "0.6464897", "0.6464897", "0.64589643", "0.6448135", "0.6443847", "0.6419465", "0.64143485", "0.6397842", "0.6385662", "0.63642436", "0.63621897", "0.6339134", "0.6318041", "0.63065255", "0.63065255", "0.6304589", "0.62824017", "0.62519187", "0.6251419", "0.6232379", "0.6224298", "0.6221251", "0.6217572", "0.62167275", "0.62150073", "0.6213657", "0.62134665", "0.6202776", "0.6201325", "0.6198736", "0.61907697", "0.6181886", "0.61814845", "0.61782193", "0.6165092", "0.616446", "0.616446", "0.6160949", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.6159248", "0.61554086", "0.61554086", "0.6151356", "0.6151356", "0.6151356", "0.6149751", "0.61465955", "0.6145077", "0.61443096", "0.61443096", "0.61443096", "0.61443096", "0.61443096", "0.61431247", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.61412036", "0.6139314", "0.613642", "0.6133467", "0.61315304", "0.61260915", "0.61220366", "0.6118379", "0.6114255", "0.61032766", "0.6102755" ]
0.0
-1
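To ground the record above, here is a self-contained sketch of how that __init__ behaves (an editorial illustration, not part of the dataset: the Base parent is assumed from super().__init__(id), and the height/x/y checks are trimmed for brevity):

# Sketch of the validation pattern in the __init__ above.
# Base is a hypothetical stand-in for the parent class that stores id.
class Base:
    def __init__(self, id=None):
        self.id = id

class Rectangle(Base):
    def __init__(self, width, height, x=0, y=0, id=None):
        if type(width) is not int:
            raise TypeError("width must be an integer")
        if width <= 0:
            raise ValueError("width must be > 0")
        self.width = width
        self.height = height
        self.x = x
        self.y = y
        super().__init__(id)

r = Rectangle(5, 3, x=1, y=2)
print(r.width, r.height)  # 5 3
try:
    Rectangle(0, 3)
except ValueError as err:
    print(err)  # width must be > 0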
getter function of width. Returns the private width attribute.
def width(self):
    return self.__width
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_width ( self ):\n return self.width", "def get_width(self):\r\n return self._width", "def get_width(self):\r\n return self._width", "def get_width(self):\r\n return self._width", "def get_width(self):\n return self.width", "def get_width(self):\n return self.__width", "def width(self):\n return self['width']", "def get_width(self):\n return self._width", "def get_width(self):\n return self._width", "def get_width(self):\n return self._width", "def get_width(self):\n return self._width", "def width(self):\n return (self.__width)", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(cls):\n return cls._width", "def width(self) :\n return self.m_width", "def getWidth(self):\n return self._width", "def GetWidth(self):\r\n\r\n return self._width", "def GetWidth(self):\r\n\r\n return self._width", "def getWidth(self):\n return self.width", "def getWidth(self):\n return self.width", "def width(self):\n # type: () -> float\n return self._width", "def width (self):\n return self._w", "def width(self) -> float:\n return self._width", "def getWidth(self) -> int:\n ...", "def width(self):\n return self._get_mean_and_samples_attribute('width')", "def width(self) -> int:\n return self.__width", "def width(self):\n return _libsbml.Dimensions_width(self)", "def width(self) -> int:\n return self._width", "def width(self):\n self._updateExtents()\n return self._mWidth", "def width(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"width\")", "def width(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"width\")", "def width(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"width\")", "def width(self) -> int:\n\t\treturn self._raw_result['data']['width']", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"width\")", "def get_width(self):\n return \"%s\" % 
self.width", "def width(self) -> int:\n\n return self._width", "def getWidth(self):\n return _libsbml.Dimensions_getWidth(self)", "def get_dimension_width(self):\n pass", "def width(self) -> int:", "def width(self) -> int:", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def width(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"width\")", "def widths(self):\n return self._widths", "def getWidth(self):\n return DEFAULT_WIDTH", "def width(self):\n return self._el._parent.execute_script(\"return arguments[0].width\", self._el)", "def width(self):\n return self._el._parent.execute_script(\"return arguments[0].width\", self._el)", "def w(self):\n return self.width", "def width(self):\n return self.__size[0]", "def width(self):\n\t\tpass", "def getWidth(self):\n return constants.DEFAULT_WIDTH", "def size(self):\n return (self.width)", "def getWidth(self):\n return len(self._data[0])", "def width(self) -> int:\n return self._image_data.width", "def width(self):\n if self._width_cache is None:\n cls = type(self)\n func = cls._width_extraction_fn()\n preprocessed_func = cls.preprocess_func(func)\n self._width_cache = self.apply(preprocessed_func)\n return self._width_cache", "def size(self):\n return self.width", "def size(self):\n return self.width" ]
[ "0.8650132", "0.85605687", "0.85605687", "0.85605687", "0.85120815", "0.8511572", "0.84871787", "0.8459629", "0.8459629", "0.8459629", "0.8459629", "0.83854336", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8377391", "0.8361523", "0.8324062", "0.8321407", "0.8296447", "0.8296447", "0.8293414", "0.8293414", "0.819018", "0.81378716", "0.81316835", "0.80612445", "0.80254", "0.80044657", "0.79831487", "0.7958947", "0.78679293", "0.784274", "0.784274", "0.784274", "0.7826188", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.781281", "0.78005105", "0.77959806", "0.7786415", "0.77456117", "0.7741897", "0.7741897", "0.77352524", "0.77352524", "0.77352524", "0.77352524", "0.77352524", "0.77352524", "0.77352524", "0.77352524", "0.77352524", "0.7722526", "0.76643443", "0.7655125", "0.7655125", "0.7610244", "0.76077765", "0.7563747", "0.7493537", "0.74505705", "0.7386082", "0.738106", "0.73614144", "0.7358461", "0.7358461" ]
0.8384285
23
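The getter in this record is normally reached through Python's property protocol; a minimal sketch (the @property decorator and the enclosing class are assumptions — the row stores only the method body):

# Minimal sketch: the width getter exposed as a read-only property.
class Rectangle:
    def __init__(self, width):
        self.__width = width  # name-mangled to _Rectangle__width

    @property
    def width(self):
        return self.__width

print(Rectangle(4).width)  # 4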
setter function of width; the private width attribute is modified.
def width(self, width):
    if type(width) is not int:
        raise TypeError("width must be an integer")
    if width <= 0:
        raise ValueError("width must be > 0")
    self.__width = width
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def set_width(self, width):\n self.__width = width", "def set_width(self, width):\n self.width = width", "def width(self, value):\n self._el._parent.execute_script(\"arguments[0].width = arguments[1]\", self._el, value)\n self.changed = True", "def width(self, value):\n self._el._parent.execute_script(\"arguments[0].width = arguments[1]\", self._el, value)\n self.changed = True", "def setWidth(self, width):\n if not self._width:\n self._width = int(width)", "def setWidth(self, width):\n self._reconfig(\"width\", width)", "def size(self, value):\n self.width = value", "def SetWidth(self, w):\r\n\r\n self._width = w", "def width(self, width):\n\n self._width = width", "def width(self, width):\n\n self._width = width", "def width(self, width):\n\n self._width = width", "def width(self, value):\n self.data_validator(\"width\", value)\n self.__width = value", "def SetWidth(self, width):\r\n\r\n self._width = width\r\n return self", "def width(self, value):\n self.validate_input(width=value)\n self.__width = value", "def width(self, value):\n self.integer_validator(\"width\", value)\n self.__width = value", "def set_width(self, *args):\n return _ida_hexrays.lvar_t_set_width(self, *args)", "def change_width(self, value):\n self.layer.edge_width = value\n self.widthSpinBox.clearFocus()\n self.setFocus()", "def setWidth(self, *args):\n return _libsbml.Dimensions_setWidth(self, *args)", "def width(self, value):\n if type(value) != int:\n raise TypeError(\"width must be an integer\")\n if value <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = value", "def set_width(self, value):\n if value not in range(0, 19):\n raise SettingsError(\"Invalid width\")\n self._parser.set(\"settings\", \"width\", str(value))\n self._save()", "def width(self, value):\n if isinstance(value, int) is False:\n raise TypeError(\"width must be an integer\")\n if value <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = value", "def width(self, width):\n if type(width) is not int:\n raise TypeError(\"width must be an integer\")\n if width <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = width", "def width(self):\n\t\tpass", "def set_pixel_width(self, width):\n # set in um\n self._dll.ShamrockSetPixelWidth(self._device, c_float(width))", "def width(self, width):\n if type(width) is not int:\n raise TypeError(\"width must be an integer\")\n elif width <= 0:\n raise ValueError(\"width must be > 0\")\n else:\n self.__width = width", "def width(self, value):\n if not isinstance(value, int):\n raise TypeError(\"width must be an integer\")\n if value <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = value", "def width(self, w):\n if w < 0:\n w *= -1\n self._width = w", "def setSegmentWidth(self, width):\n for segment in self.segments:\n segment.width = width", "def width(self, value: int):\n self.tk_ref.geometry(f'{value}x{self.height}')", "def set_line_width(self, val):\n self.lwidth = val", "def width(self, width):\n self.col += width", "def setPointWidth(self, width):\n for point in self.points:\n point.width = width", "def set_width( self, width ):\n # label seems to be the controlling thing\n self.label_widget.configure( width = width )", "def size(self, val):\n self.width = val\n self.height = val", "def 
setWidth(self, *args):\n return _libsbml.BoundingBox_setWidth(self, *args)", "def field_width(self, field_width):\n\n self._field_width = field_width", "def field_width(self, field_width):\n\n self._field_width = field_width", "def width(self) -> int:", "def width(self) -> int:", "def on_body_width_add(self, val):\n val = max(0, int(val))\n self.mdl.cmp.s_add_width = val\n self.refresh_svg_canvas()", "def _define_width(self):\n if self.led_count < 5:\n min_width = 1\n max_width = self.led_count\n else:\n min_width = 5\n max_width = round(self.led_count / 2)\n self.width = LivingAnimation(\n label=\"Width\",\n initial_value=randint(min_width, max_width),\n value_range={'min': min_width, 'max': max_width},\n duration_range={'min': MIN_WIDTH_SPEED, 'max': MAX_WIDTH_SPEED}\n )", "def setFilmWidth(self, width):\r\n if mxs.classOf(self._nativePointer) == mxs.VRayPhysicalCamera:\r\n self._nativePointer.film_width = float(width)\r\n elif mxs.classOf(self._nativePointer) == mxs.Physical:\r\n self._nativePointer.film_width_mm = float(width)\r\n return True", "def SetWidth(*args, **kwargs):\n return _gdi_.Bitmap_SetWidth(*args, **kwargs)", "def opt_width(self, width):\n if width != \"auto\":\n width = int(width)\n self.conf[\"width\"] = width", "def width(self, number):\n self.validate_int(\"width\", number)\n if number <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = number", "def width(self, width):\n # type: (float) -> None\n\n if width is not None:\n if not isinstance(width, (float, int)):\n raise TypeError(\"Invalid type for `width`, type has to be `float`\")\n\n self._width = width", "def size(self, value):\n self.width = value\n self.height = value", "def pensize(self, width):\n self._penwidth = width", "def set_window_width(self, width):\n self.device.set_window_width(int(width))\n return \"OK\"", "def set_column_width(self, index, width):\n self.colwid[index] = width", "def set_width(self, w):\n if np.isscalar(w):\n w = np.ones(self._n_parameters) * w\n else:\n w = pints.vector(w)\n if len(w) != self._n_parameters:\n raise ValueError(\n 'Width for interval expansion must a scalar or an array'\n ' of length n_parameters.')\n if np.any(w < 0):\n raise ValueError('Width for interval expansion must be positive.')\n self._w = w", "def _refresh_width(self):\n self._width = curses.tigetnum('cols')\n self._writer = formatter.DumbWriter(self._output, maxcol=self._width)", "def _SetWidth(self, column_index, content_length):\n # Updates the width at position column_index to be the max of the existing\n # value and the new content's length, or this instance's max_column_width if\n # the value would be greater than max_column_width.\n if column_index == len(self._widths):\n self._widths.append(0)\n\n new_width = max(self._widths[column_index], content_length)\n if self._max_column_width is not None:\n new_width = min(self._max_column_width, new_width)\n self._widths[column_index] = new_width", "def setMinimumWidth( self, value ):\n self._minimumWidth = value", "def setBarWidth(w):\n dislin.barwth(w)", "def change_length(self, value):\n self.layer.length = value\n self.lengthSpinBox.clearFocus()\n self.setFocus()", "def _on_width_change(self, event=None):\n with self.layer.events.edge_width.blocker():\n self.widthSpinBox.setValue(self.layer.edge_width)", "def width(self, width: Union[int, float]):\n self._width = width\n\n vertices, triangles = self._generate_meshes(self.vectors, self._width)\n self._mesh_vertices = vertices\n self._mesh_triangles = triangles\n\n self.events.width()\n\n 
self.refresh()", "def getWidth(self) -> int:\n ...", "def width(self):\n return self['width']", "def get_new_width(self):\n return self.new_width", "def width(self, width=None):\n\n if width is None:\n return self._width\n else:\n if not isinstance(width, int) and not isinstance(width, float):\n raise TypeError(\"width must be numeric, not '%s'\" % width)\n self._width = width", "def change_tail_width(self, value):\n self.layer.tail_width = float(value) / 2.0", "def set_size(self, value='S'):\n upper = value.upper()\n\n if upper == 'M': # Medium: double height\n # size = 0x01\n # charHeight = 48\n # maxColumn = 32\n self.double_height_on()\n self.double_width_off()\n elif upper == 'L': # Large: double width and height\n # size = 0x11\n # charHeight = 48\n # maxColumn = 16\n self.double_height_on()\n self.double_width_on()\n else: # Small: standard width and height\n # size = 0x00\n # charHeight = 24\n # maxColumn = 32\n self.double_width_off()\n self.double_height_off()\n # writeBytes(ASCII_GS, '!', size)\n # prevByte = '\\n' # Setting the size adds a linefeed", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def set_auto_slit_width(self, index, width):\n assert(1 <= index <= 4)\n width_um = c_float(width)\n self._dll.ShamrockSetAutoSlitWidth(self._device, index, width_um)\n self._slit_width = width", "def width(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"width\")", "def width(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"width\")", "def width(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"width\")", "def min_width(self):\n ...", "def width (self):\n return self._w", "def SetBezelWidth(self, width):\n\n self.bezelWidth = width", "def band_width(self, band_width):\n self._band_width = band_width", "def set_size(self, length, width=None):\n\n length = float(length)\n try:\n width = float(width)\n except:\n pass\n if width is not None:\n self.ang_size = np.sqrt(length * width)\n else:\n self.ang_size = length\n\n ang_size_in_rad = self.ang_size / 60 * np.pi / 180\n self.sr = ct.angle_to_solid_angle(ang_size_in_rad)", "def resize(self, inc=True, require_val=False, val=None):\n if require_val: # Set to value\n if not val:\n val = self.default_width\n try:\n val = int(val)\n except:\n message = \"Library width must be an integer\"\n self.vimiv.statusbar.err_message(message)\n return\n self.width = val\n else: # Grow/shrink by value\n if not val:\n val = 20\n try:\n val = int(val)\n except:\n message = \"Library width must be an integer\"\n self.vimiv.statusbar.err_message(message)\n return\n if inc:\n self.width += val\n else:\n self.width -= val\n # Set some reasonable limits to the library size\n if self.width > self.vimiv.winsize[0] - 200:\n self.width = self.vimiv.winsize[0] - 200\n elif self.width < 100:\n self.width = 100\n self.scrollable_treeview.set_size_request(self.width, 10)\n # Rezoom image\n if not self.vimiv.image.user_zoomed and self.vimiv.paths:\n self.vimiv.image.zoom_to(0)", "def width(self) -> int:\n return self.__width", "def get_width(self):\n return \"%s\" % self.width", "def 
setColorBarWidth(width):\n dislin.widbar(width)" ]
[ "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8376353", "0.8339519", "0.83305824", "0.8157303", "0.8157303", "0.8136468", "0.81254727", "0.80290914", "0.8017782", "0.79475737", "0.79475737", "0.79475737", "0.7908671", "0.78506804", "0.7803521", "0.7734911", "0.7709035", "0.7681154", "0.75549847", "0.75059843", "0.7453663", "0.7426078", "0.7385333", "0.7367605", "0.7365256", "0.73605126", "0.73394483", "0.7312003", "0.7282484", "0.7280327", "0.7176663", "0.71559197", "0.71251655", "0.70493287", "0.7047536", "0.70411474", "0.70000094", "0.70000094", "0.69864", "0.69864", "0.6948094", "0.6925234", "0.6916372", "0.69133615", "0.68790615", "0.68612677", "0.68474674", "0.68435514", "0.6818802", "0.6816121", "0.6756017", "0.67291504", "0.6712287", "0.66465473", "0.66392684", "0.6598839", "0.65784293", "0.65582764", "0.6547952", "0.6540512", "0.65338403", "0.65332323", "0.65141773", "0.6469657", "0.6468706", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.64617395", "0.6439266", "0.6410641", "0.6410641", "0.6410641", "0.6408153", "0.6402857", "0.6402119", "0.63961124", "0.63916796", "0.63821083", "0.6379285", "0.63759637", "0.6371674" ]
0.7365873
34
function getter of height. Return the private attribute of height.
def height(self): return self.__height
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def height(self):\n return self[\"height\"]", "def height(self):\n return self[\"height\"]", "def height(self):\n return self.client.call('GET', self.name + 'height')", "def get_height(self):\r\n return self._height", "def get_height(self):\r\n return self._height", "def get_height(self):\r\n return self._height", "def get_height(self):\n return self.__height", "def get_height(self):\n return self._height", "def get_height(self):\n return self._height", "def get_height(self):\n return self._height", "def get_height(self):\n return self._height", "def height(self):\n return (self.__height)", "def height(self) :\n return self.m_height", "def height(self):\r\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def height(self):\n return self._height", "def getHeight(self):\n return self.height", "def getHeight(self):\n return self.height", "def getHeight(self):\n return self._height", "def height(self):\n # type: () -> float\n return self._height", "def height (self):\n return self._h", "def GetHeight(self):\r\n\r\n return self._height", "def get_height(self):\r\n return self.state['h']", "def height(self):\n\n return self.__height", "def height(self) -> int:\n return self.__height", "def height(self) -> int:", "def height(self) -> int:", "def height(self) -> int:", "def height(self) -> int:\n return self._height", "def height(self) -> int:\n return self._height", "def height(self):\n\t\tpass", "def height(self):\n self._updateExtents()\n return self._mHeight", "def height(self) -> int:\n\t\treturn self._raw_result['data']['height']", "def height(self):\n return self.__size[1]", "def height(self):\n return _libsbml.Dimensions_height(self)", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"height\")", "def height(self, height=None):\n\n if height is None:\n return self._height\n else:\n if not isinstance(height, int) and not isinstance(height, float):\n raise 
TypeError(\"height must be numeric, not '%s'\" % height)\n self._height = height", "def height(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"height\")", "def height(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"height\")", "def height(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"height\")", "def height(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"height\")", "def height(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"height\")", "def get_height(self,c):\r\n return self.h", "def getHeight(self):\n return _libsbml.Dimensions_getHeight(self)", "def height(self):\n return self._el._parent.execute_script(\"return arguments[0].height\", self._el)", "def height(self):\n return self._el._parent.execute_script(\"return arguments[0].height\", self._el)", "def __height__(self) -> int:\n return self._self_height", "def get_dimension_height(self):\n pass", "def height(self):\n yy = self.yy\n return max(yy) - min(yy)", "def height(self):\n return self.get_delta_value(self.Y_INDEX)", "def h(self):\n return self.height", "def get_height(self):\n\n return \"%s\" % self.height", "def get_height(self):\n return self.calc_height(self.root)", "def get_height(self, treenode=self):\n\t\treturn self.__get_height(treenode)", "def height(self):\n return self.row", "def __len__ (self):\n return self.height", "def get_height(self):\n if self.height:\n return self.height\n \n if not self._oembed:\n return ''\n \n return self._oembed.get('height', None)", "def height(self):\n return self.sheet.height", "def getHeight(self):\n return _libsbml.BoundingBox_getHeight(self)", "def height(self):\n return self.i_node.distance(self.n_node)", "def height(self):\n return self.y.max() - self.y.min()", "def height(self) -> int:\n return self._image_data.height", "def height(self):\n return self.maxy - self.miny" ]
[ "0.8661259", "0.8661259", "0.8615402", "0.8569597", "0.8569597", "0.8569597", "0.85450464", "0.85007274", "0.85007274", "0.85007274", "0.85007274", "0.8489411", "0.8420963", "0.83773327", "0.83516467", "0.83516467", "0.83516467", "0.83516467", "0.83516467", "0.83516467", "0.83516467", "0.83516467", "0.83516467", "0.83516467", "0.8345896", "0.8345896", "0.83408797", "0.8306138", "0.8294902", "0.8280053", "0.82615846", "0.8119412", "0.810428", "0.8060663", "0.8060663", "0.8060663", "0.80516064", "0.80516064", "0.79709554", "0.792639", "0.78811777", "0.7867436", "0.78529286", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.7839584", "0.775614", "0.7741404", "0.7741404", "0.7741404", "0.7741404", "0.7741404", "0.7741404", "0.7724607", "0.7724607", "0.7724607", "0.76705116", "0.76625955", "0.7651681", "0.7651681", "0.7645483", "0.76255554", "0.7589997", "0.758101", "0.75648886", "0.75332403", "0.7499225", "0.7476172", "0.7430906", "0.74171615", "0.7405788", "0.7359499", "0.73266256", "0.7324381", "0.731952", "0.73033226", "0.7303248" ]
0.8366778
24
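The getter records in this section all follow the same pattern: the value lives in a name-mangled private attribute and is exposed read-only through @property. A minimal runnable sketch of that pattern (the Rectangle class name and constructor are assumptions for illustration; the records only show the method bodies):

class Rectangle:
    def __init__(self, height):
        self.__height = height  # stored internally as _Rectangle__height

    @property
    def height(self):
        return self.__height

r = Rectangle(4)
print(r.height)  # 4 -- attribute-style access routed through the getter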
function setter of height. The private attribute of height is modified.
def height(self, height): if type(height) is not int: raise TypeError("height must be an integer") if height is 0 or height < 0: raise ValueError("height must be > 0") self.__height = height
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def height(self) -> int:", "def height(self) -> int:", "def height(self) -> int:", "def height(self):\n\t\tpass", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def setHeight(*args):", "def get_height(self):\n return 'width'", "def width(self) -> int:", "def width(self) -> int:", "def calc_size(self):\r\n self.height = HEIGHT_STATUS", "def getWidth(self) -> int:\n ...", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def resize(self, width: int, height: int):\n pass", "def size(self, val):\n self.width = val\n self.height = val", "def __len__(self):\n return (self.width * self.height) + (self.height - 1)", "def grow_rectangle(self, dwidth, dheight):\n self.width += dwidth\n self.height += dheight\n return(self.width, self.height)", "def height(self, value: int):\n self.tk_ref.geometry(f'{self.width}x{value}')", "def grow(self, delta_width, delta_height):\r\n self.width += delta_width\r\n self.height += delta_height", "def layout(self, width, height):\n raise NotImplementedError", "def get_height():\n return resize.transforms[1].size", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def width(self):\n\t\tpass", "def size(self, value):\n self.width = value\n self.height = value", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def footprint_height():", "def __len__(self):\n return self.width * self.height", "def __len__(self) -> int:\n return self.width * self.height", "def __call__(self, byclass, height, width):\n raise NotImplementedError", "def dimension(self):", "def perimRect(length, width):\n return 2 * (length + width)", "def setBoxsize(length,width,height):\n return length,width,height", "def size(self, value):\n self.width = value", "def _height_changed(self, value):\r\n # update internal data\r\n self._height = value", "def grow(self, delta_width, delta_height):\n self.width += delta_width\n self.height += delta_height", "def grow(self, delta_width, delta_height):\n self.width += delta_width\n self.height += delta_height", "def grow(self, delta_width, delta_height):\n self.width += delta_width\n self.height += delta_height", "def h(self):\r\n return self.size.y", "def body_resize(self):", "def set_height(self,c, h):\r\n self.h = h\r\n self.T1 = [[-self.R * np.sqrt(3) / (2*self.h), self.R / (2*self.h), 1],[0,-self.R/(self.h),1],[self.R * np.sqrt(3) / (2*self.h), self.R / (2*self.h), 1]]\r\n return self.h", "def get_height(self,c):\r\n return self.h", "def __height__(self) -> int:\n return self._self_height", "def dimensions():", "def 
updateHeight(self):\n leftHeight = self.left.height if self.left != None else 0\n rightHeight = self.right.height if self.right != None else 0\n self.height = max(rightHeight, leftHeight) + 1", "def size(self, width, height):\n self._p('[size] {} {}'.format(width, height))", "def get_dimension_height(self):\n pass", "def updateSize(self, *args):\n width = self.width.get()\n height = self.height.get()\n self.initialXScale.config(to=width)\n self.initialYScale.config(to=height)\n # error check that state is not outside bounds\n for ball, state in self.ballStates.items():\n if state[0] > width:\n state[0] = width\n if state[1] > height:\n state[1] = height", "def width(self, value: int):\n self.tk_ref.geometry(f'{value}x{self.height}')", "def set_size(self, width, height):\r\n \r\n self.image = pygame.transform.scale(self.image, (width, height))\r\n self.rect = self.image.get_rect()", "def image_pbox_height(self, width):\n return min(width, width / self.pbox_aspect_ratio)", "def change_size(self, width, height):\n oldw = float(self.size().width())\n oldh = float(self.size().height())\n\n if self.indicator_type == 'session':\n neww = int(oldw + oldw * (width / 100.0))\n if neww > 0:\n self.setFixedSize(neww, oldh)\n elif self.indicator_type == 'unit':\n newh = int(oldh + oldh * (height / 100.0))\n if newh > 0:\n self.setFixedSize(oldw, newh)\n\n self.set_font_size()", "def update_size(self):\n self.size = self.image.size\n self.width, self.height = self.size", "def height(self, height):\n self.row += height", "def set_height(height):\n resize.transforms[1].size = height", "def getWidth(self):\r\n width = 1\r\n if self.orientation == \"h\":\r\n width = self.size\r\n return width" ]
[ "0.7464202", "0.7464202", "0.7464202", "0.7087595", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6872465", "0.6845854", "0.67984444", "0.67984444", "0.678835", "0.6719456", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.6655328", "0.65973973", "0.6576047", "0.6502244", "0.6455866", "0.64412516", "0.6384104", "0.6382008", "0.6375974", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63582456", "0.63388103", "0.6337175", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.63359034", "0.6309214", "0.6305466", "0.62741315", "0.6267473", "0.6266243", "0.6265687", "0.625842", "0.6256298", "0.6252992", "0.621932", "0.621932", "0.621932", "0.61966735", "0.61807275", "0.615228", "0.6147583", "0.6145285", "0.6135598", "0.6132011", "0.61186373", "0.61031264", "0.6097465", "0.6090532", "0.60581946", "0.6056314", "0.60560375", "0.60448617", "0.6041571", "0.6032143", "0.60216284" ]
0.0
-1
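The matching setter is registered with @height.setter and is where the validation lives; assigning to the attribute, including inside __init__, runs these checks. A hedged sketch of the pair in use (class name and constructor are assumptions):

class Rectangle:
    def __init__(self, height):
        self.height = height  # goes through the setter, so it is validated too

    @property
    def height(self):
        return self.__height

    @height.setter
    def height(self, height):
        if type(height) is not int:
            raise TypeError("height must be an integer")
        if height <= 0:
            raise ValueError("height must be > 0")
        self.__height = height

r = Rectangle(4)
try:
    r.height = 0
except ValueError as e:
    print(e)  # height must be > 0
try:
    r.height = "4"
except TypeError as e:
    print(e)  # height must be an integer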
function getter of x. Return the private attribute of x.
def x(self): return self.__x
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def x(self): # same as 'doc' argument of property function\n print(\"getter of x called\")\n return self._x", "def getX(self):\n return self.__x", "def getX(self):\r\n\t\treturn self._x", "def getX(self):\n return self.x", "def x(self):\n return self[\"x\"]", "def X(self):\n return self.x\n pass", "def x(self):\n return (self.__x)", "def x(self):\n return self.x", "def x ( self ) :\n return self.xvar", "def get_x(self) -> int:\n return self.__x", "def GetX(self):\r\n\r\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def __get__(self, instance, owner):\n return self.xyz", "def findX(self):\n return self.x", "def xvar ( self ) :\n return self.__xvar", "def __getattr__(self, x):\r\n return self[x]", "def get(self, x):\n key = self.feature_def.key_func(x)\n return self.cache.get(key)", "def __getattr__(self, attr): # or does it ?\n return self.X[attr]", "def _get(self, (y, x)):\n return self[y][x]", "def x(self) -> int:\n return self._x", "def __get_x__(self):\n return self.Direction['x']", "def __getitem__(self, x):\n return self.data[self.name][x]", "def x(self):\n return self[0]", "def X(self):\n return self.__X", "def x(self) -> int:", "def independent(self):\n return self.x", "def get_x0(self, x0):\n pass", "def getX(self):\n return self.proj.getX()", "def __getitem__(self,key):\n return self.x[key]", "def getX(self):\n return _libsbml.BoundingBox_getX(self)", "def x(self):\n pass", "def test_getx(self):\n point = (1,2)\n x = utils.getx(point)\n self.assertEqual(1, x)", "def x(self):\r\n return self.unif[0]", "def get(self):\r\n return self.x, self.f, self.evals, self.x_geno", "def getXPoint(self, x):\n # Find the correct parameter\n t = (x - self.p0.x) / self.d.x\n return self.point(t)", "def x(self):\n return self._data[0]", "def x(self, x=None):\n\n if x is None:\n return self._x\n else:\n if not isinstance(x, int) and not isinstance(x, float):\n raise TypeError(\"x must be numeric, not '%s'\" % x)\n self._x = x", "def x(self, x=None):\n\n if x is None:\n return self._x\n else:\n if not isinstance(x, int) and not isinstance(x, float):\n raise TypeError(\"x must be numeric, not '%s'\" % x)\n self._x = x", "def _call(self, x):\n return self.constant", "def value(self, x):\n return self(x)", "def __getitem__(self, i):\n return self.__x[i]", "def Getxcoord(self):\n return self.x_coord", "def getattr(x, name):\n pass", "def getX(self):\n return self.components[0]", "def getX(self):\n return self.components[0]", "def getX(self):\n return self.position.getX()", "def autoprops_generated_getter(self):\n return getattr(self, private_property_name)", "def x(self) -> float:\n return self.data[0]", "def getValue(self, x):\n lowidx = self._findIndex(x)\n if lowidx == None:\n return 0.0\n\n lx, ly = self[lowidx]\n if lx == x:\n return ly\n\n highidx = lowidx +1\n if highidx == len(self):\n return 0.0\n\n hx,hy = self[highidx]\n\n m = (hy-ly)/(hx - lx)\n c = ly - (m*lx)\n return (m*x) + c", "def get_x(self):\n return self.posX", "def x(self):\n return self._arr[0]", "def x(self):\n if self._x is None:\n 
self.compute_coordinates()\n return self._x", "def xval(self, i):\n return self.x[i]", "def getX(self):\n return self.position[0]", "def _get_value(x):\n if str(type(x)) == \"<class 'autograd.numpy.numpy_boxes.ArrayBox'>\":\n return x._value\n else:\n return x", "def show_x(self):\n print(self.x)", "def get_xList(self):\n return self.__x", "def _get_x(self):\n return self.position.x", "def __getitem__(self, key):\n return self.xg[key]", "def get_ship_x(self):\n return self.x", "def pget(self, name):\n getter = attrgetter(name)\n attr = getter(self.pobj)\n return attr", "def pget(self, name):\n getter = attrgetter(name)\n attr = getter(self.pobj)\n return attr", "def getXCoordinate(self) -> float:\n return self.x_coord", "def _get(self, name):\n return object.__getattribute__(self, name)", "def _get(self, name):\n return object.__getattribute__(self, name)", "def get_value(self, x, y, z):\n\t\treturn self.data[ self.xyz_to_offset(x,y,z) ]", "def getXVelocity(self):\n return self.xvelocity", "def get(self, x, y):\n i = self.map[y][x]\n return self.get(i)", "def xyz(self):\n return self._xyz", "def x(self):\n return self._kml['x']", "def get( self, function ):\n return getattr( function, self.attribute, '' )", "def getPosition(self):\n return self.x", "def get(self):\n return self.x-self.offset", "def foo(self): # this is a declaration of instance method?\r\n print self.x", "def _fget(self):\n # type: (...) -> Any\n try:\n return getattr(self, private_attr)\n except AttributeError:\n raise AttributeError(\n \"'{}' object has no attribute '{}'\".format(\n _get_type_name(type_), attr\n )\n )" ]
[ "0.7959929", "0.75061965", "0.73572534", "0.72709286", "0.7234424", "0.72268146", "0.70876133", "0.7002792", "0.69983375", "0.6947473", "0.6877965", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.6841783", "0.678769", "0.66639054", "0.6611265", "0.6565127", "0.6547316", "0.65179485", "0.6516482", "0.65149856", "0.64952135", "0.6461971", "0.6455891", "0.6431554", "0.6361003", "0.6231818", "0.6211897", "0.62013996", "0.6192038", "0.61826146", "0.6170012", "0.61684245", "0.6166067", "0.6163815", "0.61260545", "0.6122227", "0.61201036", "0.61201036", "0.61056393", "0.609861", "0.606027", "0.600817", "0.5997505", "0.59954876", "0.59954876", "0.5989913", "0.5988409", "0.5953338", "0.59314495", "0.5921869", "0.5919831", "0.5916506", "0.5898538", "0.58929145", "0.58928126", "0.58878464", "0.5884893", "0.58835715", "0.5881963", "0.58789855", "0.5872751", "0.5872751", "0.5865374", "0.5858937", "0.5858937", "0.585457", "0.58404243", "0.5834995", "0.58337647", "0.5818832", "0.57890725", "0.5773634", "0.57719827", "0.5770372", "0.5750144" ]
0.7164953
14
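Because the backing attribute has two leading underscores, Python name-mangles it per class: inside the class body, self.__x actually reads self._Rectangle__x. A small demonstration (class name assumed, as above):

class Rectangle:
    def __init__(self, x):
        self.__x = x

    @property
    def x(self):
        return self.__x

r = Rectangle(5)
print(r.x)              # 5 -- via the property
print(r._Rectangle__x)  # 5 -- the mangled name the value is really stored under
try:
    r.__x               # mangling only applies inside the class body
except AttributeError as e:
    print(e)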
function setter of x. The private attribute of x is modified.
def x(self, x):
    if type(x) is not int:
        raise TypeError("x must be an integer")
    if x < 0:
        raise ValueError("x must be >= 0")
    self.__x = x
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def width(self) -> int:", "def width(self) -> int:", "def width(self):\n return self.maxx - self.minx", "def width(self):\n return self.x.max() - self.x.min()", "def width(self):\n\t\tpass", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def width(self, width):\n self.col += width", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def width(self):\n xx = self.xx\n return max(xx) - min(xx)", "def getWidth(self) -> int:\n ...", "def set_width(self, *args):\n return _ida_hexrays.lvar_t_set_width(self, *args)", "def _width_extraction_fn(cls):\n pass", "def width(self):\n return self.get_delta_value(self.X_INDEX)", "def x_size(self):\n pass", "def width(self, w):\n if w < 0:\n w *= -1\n self._width = w", "def box_function(width_left, width_right, shift, sigma, x):\n\n prefactor = 2.0 * 0.25\n left = erf( (1.0/width_left * x + 1.0/width_left * shift + 1.0) / ( sigma * math.sqrt(2.0)) )\n right = erf( (1.0/width_right * x + 1.0/width_right * shift - 1.0) / ( sigma * math.sqrt(2.0)) )\n\n return prefactor * (left - right)", "def resize_coeff(x, new_x):\n return new_x / x", "def resize_coeff(x, new_x):\n return new_x / x", "def current_width(self, factor: Number=1) -> float:\n return self.width + self.spaces_width*factor", "def check_image_size(self, x):\n _, _, h, w = x.size()\n mod_pad_h = (self.window_size -\n h % self.window_size) % self.window_size\n mod_pad_w = (self.window_size -\n w % self.window_size) % self.window_size\n x = F.pad(x, (0, mod_pad_w, 0, mod_pad_h), 'reflect')\n return x", "def width(self, value):\n self._el._parent.execute_script(\"arguments[0].width = arguments[1]\", self._el, value)\n self.changed = True", "def width(self, value):\n self._el._parent.execute_script(\"arguments[0].width = arguments[1]\", self._el, value)\n self.changed = True", "def on_body_width_add(self, val):\n val = max(0, int(val))\n self.mdl.cmp.s_add_width = val\n self.refresh_svg_canvas()", "def wrap_x(self) -> int:\n return self._wrap_x", "def width(self, value: int):\n self.tk_ref.geometry(f'{value}x{self.height}')", "def get_i(self, x):\n return (x - self.__xmin) * (self.__width - 1) // (self.__xmax - self.__xmin)", "def pulse_width(self, value: int, /) -> None:", "def w(self):\r\n return self.size.x", "def weight4width(box_width,platformWidth,stairsLength,stepCount,stepWidth):\n if (platformWidth-stairsLength)<0:\n platformWidth = stairsLength + 50 #platform width must larger than stairs length ,the value is 50\n return platformWidth\n else:return platformWidth", "def roi_x_size():\n def r(x):\n return x & 0xFFF\n\n def w(x):\n return min(x, 0xFFF)\n return r, w", "def set_width(self, numax, a=0.66, b=0.88, factor=1.5):\n return a * numax**b * factor", "def width(self):\n if self._width_cache is None:\n cls = type(self)\n func = cls._width_extraction_fn()\n preprocessed_func = cls.preprocess_func(func)\n self._width_cache = self.apply(preprocessed_func)\n return self._width_cache", "def pulse_width(self) -> int:", "def width(self, value):\n if type(value) != int:\n raise 
TypeError(\"width must be an integer\")\n if value <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = value", "def LD_F_Vx(self, x):\n\t\tself.I = self.FONTSET_BASE + self.V[x] * 5", "def column_xw(self, x):\n xp = x * self.column_width + x * self.column_gap\n if x < self.num_columns:\n w = min(self.max_x, self.column_width)\n else:\n w = self.max_x - xp\n return xp, w", "def setPointWidth(self, width):\n for point in self.points:\n point.width = width", "def x(self) -> int:", "def size(self, value):\n self.width = value", "def width(self, value):\n self.validate_input(width=value)\n self.__width = value", "def width(self):\n # type: () -> float\n return self._width", "def width(self, width):\n\n self._width = width", "def width(self, width):\n\n self._width = width", "def width(self, width):\n\n self._width = width", "def min_width(self):\n ...", "def width(self, value):\n self.integer_validator(\"width\", value)\n self.__width = value", "def width(self) -> int:\n return self._obj[self.x_dim].size", "def width(self):\n return np.copy(self._w)", "def _convert_bar_width(x, width=1, ncols=1):\n # WARNING: This will fail for non-numeric non-datetime64 singleton\n # datatypes but this is good enough for vast majority of cases.\n x_test = np.atleast_1d(_to_ndarray(x))\n if len(x_test) >= 2:\n x_step = x_test[1:] - x_test[:-1]\n x_step = np.concatenate((x_step, x_step[-1:]))\n elif x_test.dtype == np.datetime64:\n x_step = np.timedelta64(1, 'D')\n else:\n x_step = np.array(0.5)\n if np.issubdtype(x_test.dtype, np.datetime64):\n # Avoid integer timedelta truncation\n x_step = x_step.astype('timedelta64[ns]')\n return width * x_step / ncols", "def setWidth(self, *args):\n return _libsbml.Dimensions_setWidth(self, *args)", "def width(self, value):\n self.data_validator(\"width\", value)\n self.__width = value", "def get_grid_width(self):\r\n # replace with your code\r\n return self._width", "def calculate_width(self):\n return self.endX - self.startX", "def width (self):\n return self._w", "def width(self, value):\n if isinstance(value, int) is False:\n raise TypeError(\"width must be an integer\")\n if value <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = value", "def _define_width(self):\n if self.led_count < 5:\n min_width = 1\n max_width = self.led_count\n else:\n min_width = 5\n max_width = round(self.led_count / 2)\n self.width = LivingAnimation(\n label=\"Width\",\n initial_value=randint(min_width, max_width),\n value_range={'min': min_width, 'max': max_width},\n duration_range={'min': MIN_WIDTH_SPEED, 'max': MAX_WIDTH_SPEED}\n )", "def x(self, value: int):\n if not (0 < value < SCREEN_WIDTH - self.width):\n self.dir_x = -self.dir_x\n self._x += abs(self._x - value) * self.dir_x", "def width(self, value):\n if not isinstance(value, int):\n raise TypeError(\"width must be an integer\")\n if value <= 0:\n raise ValueError(\"width must be > 0\")\n self.__width = value", "def _width_shift_(self, x: np.array, m: np.array) -> (np.array, np.array):\n # get a random sign for the shifting direction\n sign = np.random.randint(0, 2)\n shift_pix = np.random.randint(0, self.shift)\n x = shift(x, [0, sign*shift_pix])\n m = shift(m, [0, sign*shift_pix, 0], mode='nearest')\n return x,m", "def set_width(self, width):\n self.width = width", "def _extra_width(self) -> int:\n width = 0\n if self.box and self.show_edge:\n width += 2\n if self.box:\n width += len(self.columns) - 1\n return width", "def w(self):\n return self.width", "def set_width(self, width):\n self.__width = 
width", "def fun(self, x):\n\n raise NotImplementedError", "def width(self):\n return (self.__width)", "def pointlength(x):\n return 0.0", "def signals_width(self, width):\n self._p('[signals_width] {}'.format(width))", "def left_padding_width(self):\n ...", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width" ]
[ "0.6760154", "0.6760154", "0.64897656", "0.6473481", "0.64240944", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6394637", "0.6389916", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.6348413", "0.63296616", "0.6260283", "0.61709505", "0.61481786", "0.6139835", "0.60813534", "0.603881", "0.6009089", "0.6003303", "0.6003303", "0.5972013", "0.5957102", "0.59486556", "0.59486556", "0.59265256", "0.59063315", "0.59035504", "0.58874446", "0.5879178", "0.5866498", "0.5857092", "0.5848875", "0.5836917", "0.5821805", "0.5805931", "0.58022445", "0.57786405", "0.5772099", "0.5763834", "0.5761734", "0.5759205", "0.5755215", "0.5738399", "0.57318056", "0.57318056", "0.57318056", "0.57258475", "0.5724218", "0.5716829", "0.57082146", "0.5707587", "0.5702011", "0.5699173", "0.56947696", "0.5694414", "0.56760406", "0.5663169", "0.56606567", "0.5660044", "0.5657804", "0.5650346", "0.5643055", "0.5638404", "0.56267416", "0.56237984", "0.56231415", "0.5618893", "0.56166244", "0.5608235", "0.560259", "0.5591694", "0.5591694", "0.5591694", "0.5591694", "0.5591694", "0.5591694", "0.5591694", "0.5591694", "0.5591694" ]
0.0
-1
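A design note on the check itself: these setters use type(x) is not int rather than isinstance(x, int). Because bool is a subclass of int, the two differ exactly on booleans; the strict form rejects True/False while isinstance would let them through:

x = True
print(type(x) is not int)      # True  -- the strict check rejects bools
print(not isinstance(x, int))  # False -- isinstance accepts them (bool subclasses int)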
function getter of y. Return the private attribute of y.
def y(self): return self.__y
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_y(self):\n return self.__y", "def y ( self ) :\n return self.yvar", "def getY(self):\n return self.__y", "def getY(self):\r\n\t\treturn self._y", "def y(self):\n return (self.__y)", "def y(self):\n return self[\"y\"]", "def Y(self):\n return self.y\n pass", "def getY(self):\n y = self.getAttribute('y')\n kind = self.getKind()\n self._y = y if kind == 'pie' else None\n return self._y", "def getY(self):\n return self.y", "def getY(self):\n return self.y", "def GetY(self):\r\n\r\n return self._y", "def y(self):\n return self.y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n if self._y is None:\n self.compute_coordinates()\n return self._y", "def _get_y_data(self):\n return self.y(self.xs)", "def yvar ( self ) :\n return self.__yvar", "def findY(self):\n return self.y", "def Y(self):\n return self._Y", "def y(self):\n return self._arr[1]", "def y(self):\n return self[1]", "def y(self,) -> int:\n return self._y", "def y(self):\n return self._data[1]", "def y(self):\n return self.dataset.y", "def y(self, x):\n return x", "def ydata(self):\n return self._ydata", "def __get_y__(self):\n return self.Direction['y']", "def y(self) -> float:\n return self.data[1]", "def y(self, y=None):\n\n if y is None:\n return self._y\n else:\n if not isinstance(y, int) and not isinstance(y, float):\n raise TypeError(\"y must be numeric, not '%s'\" % y)\n self._y = y", "def y(self, y=None):\n\n if y is None:\n return self._y\n else:\n if not isinstance(y, int) and not isinstance(y, float):\n raise TypeError(\"y must be numeric, not '%s'\" % y)\n self._y = y", "def get_y(self, x):\n p, y = self.get_p_y(x)\n return y", "def y(self):\n return self.yn.func", "def getY(self):\n return self.components[1]", "def getY(self):\n return self.components[1]", "def getY(self):\n return self.proj.getY()", "def get_y():\n metadata = get_dataset_metadata(['shape_y', 'type_y'])\n return get_ndarray(name='y_original',\n arr_shape=metadata['shape_y'],\n arr_type=metadata['type_y'])", "def y2(self):\n return self._y2", "def y(self):\n return self.coords[1]", "def getY(self):\n return _libsbml.BoundingBox_getY(self)", "def y(self):\n return self._coords[1]", "def y(self):\n return self[:, 1]", "def yvec(self):\n return self._yvec", "def getYCoordinate(self) -> float:\n return self.y_coord", "def y(self):\n return _libsbml.Point_y(self)", "def y(self):\r\n return self.unif[1]", "def getY(self):\n return self.position[1]", "def getY(self):\n return self.labels[0]", "def y(self):\n pass", "def get_Y_gcn(self):\n\n return self.Y", "def y(self):\n return self._kml['y']", "def get_y(self):\n return self.coords[1]", "def y0(self):\n return self._y0", "def y_coord(self):\n\n return self.y0 + np.arange(self.ny) * self.dy", "def getYpos(self):\n return self.y", "def getY(self):\n return self.position.getY()", "def y(self):\n return np.array([f.y for f in self])", "def test_gety(self):\n point = (3,2.5)\n y = utils.gety(point)\n self.assertEqual(2.5, y)", "def y(self) -> ir.FloatingValue:\n return ops.GeoY(self).to_expr()", "def y(self) -> int:", "def gety(self,whichsol_,y): # 3\n if not isinstance(whichsol_,soltype): raise TypeError(\"Argument whichsol has wrong type\")\n if y is None: raise TypeError(\"Invalid type for argument y\")\n 
_copyback_y = False\n if y is None:\n y_ = None\n else:\n try:\n y_ = memoryview(y)\n except TypeError:\n try:\n _tmparr_y = array.array(\"d\",y)\n except TypeError:\n raise TypeError(\"Argument y has wrong type\")\n else:\n y_ = memoryview(_tmparr_y)\n _copyback_y = True\n else:\n if y_.format != \"d\":\n y_ = memoryview(array.array(\"d\",y))\n _copyback_y = True\n if y_ is not None and len(y_) != self.getnumcon():\n raise ValueError(\"Array argument y has wrong length\")\n res = self.__obj.gety(whichsol_,y_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n if _copyback_y:\n y[:] = _tmparr_y", "def get_stig_y(self):\n raise NotImplementedError", "def gety(self,whichsol_,y_):\n _y_minlength = self.getnumcon()\n if self.getnumcon() > 0 and y_ is not None and len(y_) != self.getnumcon():\n raise ValueError(\"Array argument y is not long enough: Is %d, expected %d\" % (len(y_),self.getnumcon()))\n if isinstance(y_,numpy.ndarray) and not y_.flags.writeable:\n raise ValueError(\"Argument y must be writable\")\n if y_ is None:\n raise ValueError(\"Argument y may not be None\")\n if isinstance(y_, numpy.ndarray) and y_.dtype is numpy.dtype(numpy.float64) and y_.flags.contiguous:\n _y_copyarray = False\n _y_tmp = ctypes.cast(y_.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_double))\n elif y_ is not None:\n _y_copyarray = True\n _y_np_tmp = numpy.zeros(len(y_),numpy.dtype(numpy.float64))\n _y_np_tmp[:] = y_\n assert _y_np_tmp.flags.contiguous\n _y_tmp = ctypes.cast(_y_np_tmp.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_double))\n else:\n _y_copyarray = False\n _y_tmp = None\n \n res = __library__.MSK_XX_gety(self.__nativep,whichsol_,_y_tmp)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n if _y_copyarray:\n y_[:] = _y_np_tmp", "def y0(self):\n return self.params['y0']", "def getYF(self):\r\n return self.yFus;", "def yax(self):\n return self.__yax", "def get_stage_y(self):\n raise NotImplementedError", "def get_axis_y(self):\r\n return self.__y_axis", "def y(self, value=None):\n if isinstance(value, (int, float)):\n self[1] = value\n else:\n if value is not None:\n raise TypeError(\"Cannot be set to {}\".format(type(value)))\n return self[1]", "def y2(self):\n return self._y + self._y2", "def getYUnits(self): \n return self.__y_units__", "def yaxis ( self ) :\n return self.__yaxis", "def yaxis ( self ) :\n return self.__yaxis", "def yxal(self, i):\n return self.y[i]", "def y(self):\n return self.axes[0]", "def getOutY(self):\n pass", "def y(self):\n return self._translation[1, 0]", "def _get_y(self):\n return self.position.y", "def y_points(self):\n return self._y_points", "def getYLabel(self): \n return self.__y_label__", "def get_pos_y(self):\n return self.__pos_y", "def get_alien_y(self):\n return self.y", "def y(self) -> int:\n return self.data.y_centre >> 4", "def _derY(self, x, y):\n raise NotImplementedError()" ]
[ "0.8211156", "0.8203353", "0.8118901", "0.81069934", "0.809056", "0.80078703", "0.7958397", "0.79568297", "0.7908904", "0.7908904", "0.7864945", "0.7833086", "0.78205884", "0.78205884", "0.78205884", "0.78205884", "0.78205884", "0.78205884", "0.78205884", "0.78205884", "0.78205884", "0.78205884", "0.77946615", "0.77425015", "0.77230114", "0.7664596", "0.76369137", "0.75952613", "0.7583881", "0.75798416", "0.75769126", "0.7520089", "0.751493", "0.75038755", "0.74686956", "0.7462186", "0.745089", "0.745089", "0.7439703", "0.7311722", "0.7293506", "0.7293506", "0.72710556", "0.7251976", "0.7246464", "0.7177297", "0.7174708", "0.71735084", "0.7172196", "0.71558154", "0.71416146", "0.7121386", "0.7105822", "0.7091369", "0.7072956", "0.7066073", "0.7045334", "0.704339", "0.70395386", "0.6980539", "0.6977887", "0.69515324", "0.6934457", "0.6922584", "0.68942463", "0.6881755", "0.6878211", "0.6875204", "0.6873466", "0.6863155", "0.6859053", "0.68381655", "0.6835606", "0.68186134", "0.6806644", "0.6800183", "0.6784048", "0.6763934", "0.6755891", "0.6755891", "0.67476463", "0.6737001", "0.67349845", "0.67297804", "0.6722892", "0.67060673", "0.66926354", "0.66722596", "0.66679657", "0.6650035", "0.66463745" ]
0.80254793
13
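Note that a @property with no matching setter is read-only: assignment from outside raises AttributeError until a @y.setter is registered, which is exactly what the next record supplies. A tiny sketch (the Point class name is a placeholder):

class Point:
    def __init__(self, y):
        self.__y = y

    @property
    def y(self):
        return self.__y

p = Point(2)
print(p.y)   # 2
try:
    p.y = 3  # no setter defined -> AttributeError
except AttributeError as e:
    print(e)  # exact message varies by Python version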
function setter of y. The private attribute of y is modified.
def y(self, y):
    if type(y) is not int:
        raise TypeError("y must be an integer")
    if y < 0:
        raise ValueError("y must be >= 0")
    self.__y = y
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def y_size(self):\n pass", "def y(self) -> int:", "def y(self):\n pass", "def y(self, x):\n return x", "def wrap_y(self) -> int:\n return self._wrap_y", "def pixelsizey(self) -> ErrorValue:\n return ErrorValue(self._data['YPixel'], self._data.setdefault('YPixelError',0.0))", "def squareY(self):\n \n ## square the y portion element-wise ##\n self.y = [yElement**2 for yElement in self.y]", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def getWidth(*args):", "def y(self, value):\n if not (0 < value < SCREEN_HEIGHT - self.height):\n self.dir_y = -self.dir_y\n self._y += abs(self._y - value) * self.dir_y", "def h(self):\r\n return self.size.y", "def flipy(y):\n return -y + 600", "def n_y(self, level):\n resolution = self.resolution(level)\n return (self.y_extent // resolution + 63) // 64", "def yline(y,farright, width, dash, grayamount):\r\n aline([[0,y],[farright,y]],width, dash, grayamount)", "def y(self) -> int:\n return self.data.y_centre >> 4", "def setY(self, y):\r\n\t\tself._y=y", "def y(self,) -> int:\n return self._y", "def roi_y_size():\n def r(x):\n return x & 0xFFF\n\n def w(x):\n return min(x, 0xFFF)\n return r, w", "def getWidth(self) -> int:\n ...", "def ysize(self):\n bbox = self.bbox\n return bbox[1][1] - bbox[0][1]", "def width(self) -> int:", "def width(self) -> int:", "def y ( self ) :\n return self.yvar", "def toTk(self,y):\r\n if y == maxValue: return 0\r\n tk_y = Size\r\n if y != minValue:\r\n tk_y -= y\r\n return tk_y", "def set_y(self, new_y):\r\n self.y = new_y", "def set_y(self, y: float):\n self.y = y", "def add_y(self, y, add):\n return (y + add) % self.y_len", "def y1(self, level):\n resolution = self.resolution(level)\n y1 = self.y0(level) + 64\n y1[-1] = (self.y_extent + resolution - 1) // resolution\n return y1", "def ydim(self):\n return len(self._y)", "def width(self, width):\n self.col += width", "def yscale(value):\n impl.yscale(**locals())", "def ymarg(self, y):\n return sum(self.a[y])", "def setY(self, y):\n self.y = y\n pass", "def cy(y):\n return pic_height + ch(y - global_min_y)", "def width(self):\n return self.maxx - self.minx", "def y(self):\n return self.y", "def width(self):\n xx = self.xx\n return max(xx) - min(xx)", "def setSize(self, y, h):\n if (h <= 0.0):\n self.ovflRect.hide()\n self.canvas.setHeight(y)\n else:\n self.ovflRect.setRect(0, y, self.mainWidth, h)\n self.ovflRect.show()\n self.canvas.setHeight(y + h)", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def disp_y(self, *args, **kwargs) -> Any:\n pass", "def _apply_y(self, state, axes, **kwargs):\n return 1j * self._apply_x(self._apply_z(state, axes), axes)", "def width(self):\n return self.x.max() - self.x.min()", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def setWidth(*args):", "def y(self, y):\n if type(y) is 
not int:\n raise TypeError(\"y must be an integer\")\n elif y < 0:\n raise ValueError(\"y must be >= 0\")\n else:\n self.__y = y", "def _set_y_size(self):\n self._level_gen.size = (self._level_gen.size[X],\n self._level_size_y_spinbox.value(),\n self._level_gen.size[Z])\n self._refresh_view()", "def w(self):\r\n return self.size.x", "def set_ly(self):\r\n self._ly = self.dy * self.ny - self.oy", "def y(self):\n return np.sum(self.bbox, 0)[1] / 2", "def y(self, value):\n self.validate_input(y=value)\n self.__y = value", "def vec_y(self):\t\t\r\n if self.oy != 0:\r\n ov = self.oy\r\n lv = self.self.ly + self.oy\r\n else:\r\n ov = self.dy / 2\r\n lv = self.ly\r\n\r\n yv = \"\"\r\n for num in np.arange(ov, lv, self.dy):\r\n yv += str(num) + \" \"\r\n\r\n return yv", "def y(self):\n return (self.__y)", "def width(self):\n\t\tpass", "def pos_y(self, *args, **kwargs) -> Any:\n pass", "def get_y(self):\n return self.__y", "def offset_y(self, y: int):\n self.tk_ref.geometry(f'{self.width}x{self.height}+{self.offset_x}+{y}')", "def square_fn(pars, x_axis):\n # vector to fill\n y_axis = np.zeros(len(x_axis))\n # pars = [x1,x2,y1]\n # find indexs in x axis\n idx1 = np.argmin(abs(x_axis-pars[0]))\n idx2 = np.argmin(abs(x_axis-pars[1]))\n # fill y axis and return \n y_axis[0:idx1] = pars[2]\n y_axis[idx1:idx2+1] = pars[3]\n y_axis[idx2+1:] = pars[4]\n return y_axis", "def width(self):\n return np.copy(self._w)", "def change_tail_width(self, value):\n self.layer.tail_width = float(value) / 2.0", "def y2(self):\n return self._y + self._y2", "def normalize_y(y: np.ndarray) -> np.ndarray:\n #print(\"enter bartpy/bartpy/data.py Target normalize_y\")\n \n y_min, y_max = np.min(y), np.max(y)\n output = -0.5 + ((y - y_min) / (y_max - y_min))\n #print(\"-exit bartpy/bartpy/data.py Target normalize_y\")\n return output", "def resizeY(self,yMin=None,yMax=None,dryrun=False):\n if yMin is None:\n yMin = self.y[0]\n if yMax is None:\n yMax = self.y[-1]\n Ly_specified = yMax - yMin\n Ly = self.y[-1] - self.y[0]\n if Ly_specified > Ly:\n print('Specified y range', (yMin,yMax),\n 'greater than', (self.y[0],self.y[-1]))\n return\n\n if dryrun: sys.stdout.write('(DRY RUN) ')\n print('Resizing fluctuations field in y-dir from [',\n self.y[0],self.y[-1],'] to [',yMin,yMax,']')\n print(' before:',self.U.shape)\n \n newNY = int(np.ceil(Ly_specified/Ly * self.NY))\n Unew = self.U[:,:,:newNY,:]\n Tnew = self.T[ :,:newNY,:]\n print(' after:',Unew.shape)\n if not dryrun:\n self.U = Unew\n self.T = Tnew\n self.NY = newNY\n\n ynew = yMin + np.arange(newNY,dtype=self.realtype)*self.dy\n if not dryrun:\n print('Updating y coordinates')\n self.y = ynew\n else:\n print('(DRY RUN) y coordinates:',ynew)", "def offset_y(self, X, y):\n X, y = self.check_consistent_params(X, y)\n if len(y.shape) == 1:\n offset = len(y) - X.shape[0]\n return y[offset:]\n else:\n offset = len(y[0]) - X.shape[0]\n return y[0, offset:]", "def y(self, value):\n self.data_validator(\"y\", value)\n self.__y = value", "def set_y(self, y):\n self._y = y", "def yule_y(self):\n a, c, d, b = self.to_ccw()\n p1, q1 = a + b, c + d\n p2, q2 = a + c, b + d\n n = a + b + c + d\n\n if n == 0:\n return np.nan\n elif p1 == n:\n # c and d are zero\n return _div(sqrt(a) - sqrt(b), sqrt(a) + sqrt(b))\n elif p2 == n:\n # b and d are zero\n return _div(sqrt(a) - sqrt(c), sqrt(a) + sqrt(c))\n elif q1 == n:\n # a and b are zero\n return _div(sqrt(d) - sqrt(c), sqrt(d) + sqrt(c))\n elif q2 == n:\n # a and c are zero\n return _div(sqrt(d) - sqrt(b), sqrt(d) + sqrt(b))\n\n ad = a * d\n 
bc = b * c\n\n return _div(sqrt(ad) - sqrt(bc), sqrt(ad) + sqrt(bc))", "def y(self):\n return self[1]", "def on_body_width_add(self, val):\n val = max(0, int(val))\n self.mdl.cmp.s_add_width = val\n self.refresh_svg_canvas()" ]
[ "0.6572785", "0.63723826", "0.6260383", "0.62368536", "0.6097661", "0.5998206", "0.59495974", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.5924994", "0.59168607", "0.5914221", "0.58833057", "0.5881061", "0.5879977", "0.58728963", "0.585327", "0.583568", "0.5824275", "0.57992995", "0.57949907", "0.5786505", "0.5786505", "0.57801354", "0.5774218", "0.5762889", "0.5760641", "0.5749115", "0.5738653", "0.5712853", "0.57100976", "0.5689969", "0.568806", "0.56796676", "0.56713706", "0.5656079", "0.5640801", "0.5640658", "0.5626251", "0.5625824", "0.5625824", "0.5625824", "0.5625824", "0.5625824", "0.5625824", "0.5625824", "0.5625824", "0.5625824", "0.5625824", "0.56174856", "0.5611228", "0.5600313", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5597129", "0.5591289", "0.5568496", "0.55611485", "0.555732", "0.5554991", "0.55473536", "0.55428153", "0.55419266", "0.55339164", "0.5530603", "0.5525481", "0.55244285", "0.55195034", "0.55112135", "0.54984194", "0.5495984", "0.54881424", "0.5485595", "0.5483416", "0.54808974", "0.5474421", "0.5449555", "0.5447886", "0.5443492" ]
0.5659497
48
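Taken together, the getter/setter records above describe one Rectangle-style class with four validated properties. A sketch assembling them follows; the class name, constructor signature, the width property, and the shared _check helper are assumptions inferred from the surrounding records, not given verbatim. Note the asymmetry: dimensions must be strictly positive, while coordinates may be zero because they are drawing offsets.

class Rectangle:
    def __init__(self, width, height, x=0, y=0):
        self.width = width    # every assignment runs the matching setter below
        self.height = height
        self.x = x
        self.y = y

    @staticmethod
    def _check(name, value, zero_ok):
        # hypothetical helper -- the dataset records inline this logic per setter
        if type(value) is not int:
            raise TypeError("{} must be an integer".format(name))
        if value < 0 or (value == 0 and not zero_ok):
            raise ValueError("{} must be {}".format(name, ">= 0" if zero_ok else "> 0"))

    @property
    def width(self):
        return self.__width

    @width.setter
    def width(self, value):
        self._check("width", value, zero_ok=False)
        self.__width = value

    @property
    def height(self):
        return self.__height

    @height.setter
    def height(self, value):
        self._check("height", value, zero_ok=False)
        self.__height = value

    @property
    def x(self):
        return self.__x

    @x.setter
    def x(self, value):
        self._check("x", value, zero_ok=True)
        self.__x = value

    @property
    def y(self):
        return self.__y

    @y.setter
    def y(self, value):
        self._check("y", value, zero_ok=True)
        self.__y = value

r = Rectangle(3, 4)
r.x = 0          # coordinates may be zero
try:
    r.width = 0  # dimensions must be strictly positive
except ValueError as e:
    print(e)     # width must be > 0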
method that finds the area of a rectangle. Return the area of the rectangle.
def area(self): return self.width * self.height
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rect_area(rect):\n return rect[2] * rect[3]", "def rectangle_area(base, height):\n return (base * height)", "def rectangle_area(width : number, height : number) ->number:\n area = width*height\n #print(\"The area of rectangle is =\", area, \"sq. units\")\n return area", "def area_rect(w, h):\n return w * h", "def rectangle_area(coordinates):\n return (coordinates[2] - coordinates[0]) * (coordinates[3] - coordinates[1])", "def findArea(self):\n\n a, b = self.sides\n area = a * b\n print(f\"Are of rectangle is: {area}\")", "def _area(bounds):\n return (bounds[0, 1] - bounds[0, 0]) * (bounds[1, 1] - bounds[1, 0])", "def rectArea(base, height):\n return base * height", "def rectangle_area(side1, side2):\n return float(side1) * float(side2)", "def areaRect(length, width):\n return length * width", "def area(width, height):\n return width * height", "def area(self):\n\n bbox = self.bbox\n area = Box.calculate_bbox_area(bbox, bbox_type=self.bbox_type)\n\n return area", "def test_rectangle_area(self):\n self.assertEqual(12, rectangle_area(\n self.values['base'], self.values['height']))", "def area(self):\n num_rows = self.row_end - self.row_start\n num_cols = self.col_end - self.col_start\n area = num_rows*num_cols\n return area", "def test_rectangle(self):\n result = shape_area.rectangle_area(6,7)\n self.assertEqual(result,26)", "def area(self):\n area = self.__width * self.__height\n return area", "def test_area(self):\r\n rect = Rectangle(30, 50, 130, 60)\r\n assert rect.area == 100 * 10\r\n\r\n rect = Rectangle(10.5, 20.7, 11.2, 50.6)\r\n assert abs(rect.area - 20.93) < 1e-10\r\n\r\n rect = Rectangle(-10, -20, 10, 60)\r\n assert rect.area == 20 * 80", "def area(self):\n if isinstance(self.crs, GeographicalCRS):\n major_axis = self.crs.ellipsoid.a\n minor_axis = self.crs.ellipsoid.b\n\n area = 0.0\n if major_axis == minor_axis: # Sphere\n for seg in self.segment_tuples:\n x1, y1 = seg[0]\n x2, y2 = seg[1]\n area += geodesy.spherical_area(major_axis, x1, y1, x2, y2)\n\n else:\n for seg in self.segment_tuples:\n x1, y1 = seg[0]\n x2, y2 = seg[1]\n area += geodesy.ellipsoidal_area(major_axis, minor_axis,\n x1, y1, x2, y2)\n\n else:\n # Cartesian coordinate systems\n x, y = self.coordinates\n x0 = np.min(x)\n area = (0.5*(x[0] + x[-1]) - x0) * (y[0] - y[-1])\n area += sum((0.5*(x[i+1]+x[i]) - x0) * (y[i+1] - y[i]) for i in range(len(x)-1))\n return abs(area) - sum(sub.area for sub in self.subs)", "def area(base, height):\n\n return base * height", "def get_rect_area(length, width):\n length = (str)(length)\n width = (str)(width)\n if((length.isnumeric()) and (length.isnumeric())):\n length = (float)(length)\n width = (float)(width)\n area = length * width\n else:\n area = \"Invalid input, length and width must be numeric value\"\n return area", "def area(x, y):\n return x*y/2", "def test_area(self):\n r1 = Rectangle(3, 2)\n self.assertEqual(r1.area(), 6)\n\n r2 = Rectangle(2, 10)\n self.assertEqual(r2.area(), 20)\n\n r3 = Rectangle(10, 10)\n self.assertEqual(r3.area(), 100)", "def area_rectangle(length: float, width: float) -> float:\r\n if length < 0 or width < 0:\r\n raise ValueError(\"area_rectangle() only accepts non-negative values\")\r\n return length * width", "def box_area(box):\n x1, y1, x2, y2 = box\n w = x2 - x1\n h = y2 - y1\n return float(w) * h", "def area_of(self, left_top, right_bottom):\n hw = np.clip(right_bottom - left_top, 0.0, None)\n return hw[..., 0] * hw[..., 1]", "def area(self):\n area = self.__length * self.__width\n\n return area", "def 
areaOfQuadrilateral(rect):\n rect = np.array(rect)\n A = rect[..., 0, :]\n B = rect[..., 1, :]\n C = rect[..., 2, :]\n D = rect[..., 3, :]\n return 0.5 * np.abs((A[..., 1] - C[..., 1]) * (D[..., 0] - B[..., 0]) + (B[..., 1] - D[..., 1]) * (A[..., 0] - C[..., 0]))", "def area(self):\n return (self.baselength1 + self.baselength2)*self.height/2", "def area(self):\n return(self.__width * self.__height)", "def area(self):\n return self.__height * self.__width", "def area(self):\n return self.__height * self.__width", "def area(self):\n return self.__height * self.__width", "def area(self):\n area = 0\n\n for room in self.rooms:\n area += room.polygon.area()\n\n for wall in self.walls:\n area += wall.polygon.area()\n\n return area", "def area(self):\n\n return (self.x1 - self.x0) * (self.y1 - self.y0)", "def rect(l, b):\n print(\"Area of rectangle is\", l * b)", "def area(x1, y1, x2, y2, x3, y3):\n return abs((x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2)) / 2.0)", "def area(self):\n area = self.__size * self.__size\n return area", "def area(self):\n\t\treturn self.width * self.height", "def area(self):\n return (self.__width * self.__height)", "def area(self):\n return (self.__width * self.__height)", "def area(self):\n\t\treturn self.width() * self.height()", "def area(self):\n return self.__width * self.__height", "def area(self):\n return self.__width * self.__height", "def area(self):\n return self.__width * self.__height", "def area(self):\n return self.__width * self.__height", "def area(self):\n return self.__width * self.__height", "def area(self):\n\t\treturn self.height * self.height", "def area(self):\n area = self.__size * self.__size\n return(area)", "def get_area(self):\n ### Original\n from pyresample.spherical_geometry import get_polygon_area\n\n return get_polygon_area(self.corners)\n ### End Original\n #from .spherical import SphPolygon\n #shell()\n #log.info('RUNNING SPHERICAL in get_area')\n\n #return SphPolygon(self.corners).area", "def area(symbol):\n return (symbol.bounding_box.vertices[2].x - symbol.bounding_box.vertices[0].x) * (\n symbol.bounding_box.vertices[2].y - symbol.bounding_box[0].y)", "def area(self):\n if len(self.exterior) < 3:\n raise Exception(\"Cannot compute the polygon's area because it contains less than three points.\")\n poly = self.to_shapely_polygon()\n return poly.area", "def area(self):\n return self._width * self._height", "def area_rectangulo(b,h):\n area = b*h\n print \"El area es: \",area", "def area(self):\n return _property_op(arctern.ST_Area, self)", "def extract_area(data,box):\n if box is None or box[0] is None or box[1] is None or box[1][0] - box[0][0] == 0 or box[1][1] - box[0][1] == 0:\n box = ((0,0),(10,10));\n area = ut.extract_area(data['frame'],*box,data['uc'],256);\n return area;", "def area_of(left_top, right_bottom):\n hw = np.clip(right_bottom - left_top, 0.0, None)\n return hw[..., 0] * hw[..., 1]", "def area_of(left_top, right_bottom):\n hw = np.clip(right_bottom - left_top, 0.0, None)\n return hw[..., 0] * hw[..., 1]", "def area(self):\n return (self.width * self.height)", "def calculate_bbox_area(bbox, rows, cols):\n bbox = denormalize_bbox(bbox, rows, cols)\n x_min, y_min, x_max, y_max = bbox[:4]\n area = (x_max - x_min) * (y_max - y_min)\n return area", "def get_area(self):\n return self._area", "def test_area3(self):\n r3 = Rectangle(8, 7, 0, 0, 12)\n self.assertEqual(r3.area(), 56)", "def calculatearea(self):\r\n return self.width * self.height", "def rectangle_surface_area(a,b):\n return (a*b)", "def 
area(self):\n\n return self.__width * self.__height", "def area(self):\n\n return self.__width * self.__height", "def area(self):\n\n return self.__width * self.__height", "def area(self):\n return self._ned_shape.area", "def area(self):\n return self.width*self.height", "def area(self):\n return self.width() * self.height()", "def boundingBoxArea(box):\n return (box[2] - box[0] + 1) * (box[3] - box[1] + 1)", "def calculate_bbox_area(bbox: BoxType, rows: int, cols: int) -> float:\n bbox = denormalize_bbox(bbox, rows, cols)\n x_min, y_min, x_max, y_max = bbox[:4]\n area = (x_max - x_min) * (y_max - y_min)\n return area", "def get_area(self):\n raise NotImplementedError()", "def area(self) -> float:\n raise NotImplementedError", "def area(self):\r\n return self.width * self.height", "def total_area(self):\n return numpy.prod([r[1] - r[0] for r in self.range_])", "def test_area2(self):\n r2 = Rectangle(2, 10)\n self.assertEqual(r2.area(), 20)", "def test_area1(self):\n r1 = Rectangle(3, 2)\n self.assertEqual(r1.area(), 6)", "def area(self):\n return _cantera.wall_area(self.__wall_id)", "def area(self):\n area = 0\n last = self._coordinates[-1]\n for c in self._coordinates:\n area += (last[0] * c[1] - last[1] * c[0])\n last = c\n return float(\"{:.2f}\".format(abs(area) * 0.5))", "def area(self):\n area = self._lengths[0] * self._lengths[1] * math.sin(math.radians(self._angles[0]))\n area += self._lengths[2] * self._lengths[3] * math.sin(math.radians(self._angles[0]))\n return float('{:.2f}'.format(area * 0.5))", "def area(\n self):\n pi = numpy.pi\n area0 = 4.0 * pi / 8.0\n areadiv = 4.0 ** self.depth\n area = area0 / areadiv * (180.0 / pi) ** 2\n return area", "def area(self):\n return 0.5*np.abs(np.dot(self.x,np.roll(self.y,1))-np.dot(self.y,np.roll(self.x,1)))", "def area(self, boxes):\n with tf.name_scope('area'):\n ymin, xmin, ymax, xmax = tf.unstack(boxes, axis=1)\n return (ymax - ymin) * (xmax - xmin)", "def rectangle_handler(string):\n\n a = int(input(string[0]))\n b = int(input(string[1]))\n\n rectangle = Shape.Rectangle(a, b)\n\n print(string[2] + rectangle.display() + \" is \" + str(rectangle.getarea()))", "def calculate_area(length: int, width: int) -> int:\n\n # process\n area = length * width\n\n # output\n return area", "def area(length, hypotenuse):\n side = int(length)* hypotenuse\n return round(side*2, 2) # returns the rounded area of the roof.", "def test_area(self):\n r = Rectangle(5, 6)\n self.assertEqual(r.area(), 30)\n w = randrange(10) + 1\n h = randrange(10) + 1\n r.width = w\n r.height = h\n self.assertEqual(r.area(), w * h)\n w = randrange(10) + 1\n h = randrange(10) + 1\n r = Rectangle(w, h, 7, 8, 9)\n self.assertEqual(r.area(), w * h)\n w = randrange(10) + 1\n h = randrange(10) + 1\n r = Rectangle(w, h, y=7, x=8, id=9)\n self.assertEqual(r.area(), w * h)", "def area(boxes):\n y_min, x_min, y_max, x_max = np.split(boxes, 4, axis=-1)\n return np.squeeze((y_max - y_min) * (x_max - x_min), [1])", "def area(self):\n\n raise Exception(\"area() is not implemented\")", "def area(self):\n\n raise Exception(\"area() is not implemented\")", "def area(self):\n return self.length*self.length", "def area(self):\n raise Exception('area() is not implemented')", "def area(self):\n raise Exception('area() is not implemented')" ]
[ "0.82248306", "0.8159178", "0.8146606", "0.8018593", "0.7922524", "0.78896415", "0.77728826", "0.77609915", "0.76662093", "0.7627938", "0.7534685", "0.74978864", "0.7481451", "0.7436784", "0.73720986", "0.73571813", "0.73135287", "0.72804546", "0.72797346", "0.7272335", "0.72559065", "0.723764", "0.7141346", "0.71124434", "0.7103146", "0.70858145", "0.7070827", "0.70637953", "0.70604783", "0.70496535", "0.70496535", "0.70496535", "0.7046071", "0.703365", "0.7032656", "0.7002676", "0.6977217", "0.697358", "0.69646233", "0.69646233", "0.69620585", "0.6948561", "0.6948561", "0.6948561", "0.6948561", "0.6948561", "0.6946075", "0.6943499", "0.69406545", "0.69399714", "0.6937224", "0.69273156", "0.69265264", "0.69158363", "0.6915065", "0.6910062", "0.6910062", "0.68829113", "0.6882775", "0.68826", "0.6875377", "0.6870117", "0.68655956", "0.6861336", "0.6861336", "0.6861336", "0.68519527", "0.6850017", "0.6844201", "0.68397355", "0.68193597", "0.68192023", "0.6808798", "0.6808393", "0.67830426", "0.67491966", "0.67372966", "0.6733927", "0.67290115", "0.6711308", "0.6699273", "0.66976255", "0.6692107", "0.6677661", "0.66668", "0.6636092", "0.66247", "0.6621081", "0.6614532", "0.6614532", "0.6597552", "0.6593151", "0.6593151" ]
0.68457466
74
Method that prints a rectangle on the screen with the character "#".
def display(self):
    for _jumpline in range(self.y):
        print(end="\n")
    for _height in range(self.height):
        for _space in range(self.x):
            print(" ", end="")
        for _width in range(self.width):
            print("#", end="")
        print(end="\n")
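For context, a minimal usage sketch of the display() method above (not part of the dataset record): the surrounding Rectangle class is hypothetical and carries only the width, height, x and y attributes the method reads.

class Rectangle:
    def __init__(self, width, height, x=0, y=0):
        self.width = width
        self.height = height
        self.x = x
        self.y = y

    def display(self):
        # y leading blank lines, then each of height rows:
        # x spaces of left offset followed by width '#' characters
        for _jumpline in range(self.y):
            print(end="\n")
        for _height in range(self.height):
            for _space in range(self.x):
                print(" ", end="")
            for _width in range(self.width):
                print("#", end="")
            print(end="\n")

Rectangle(4, 2, x=2, y=1).display()
# prints one leading blank line, then two rows of "  ####"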
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rectangle(height,width):\n for row in range(height):\n for column in range(width):\n print(CHAR, end = '')\n print()", "def display(self):\n row = (' ' * self.__x) + (Rectangle.print_symbol * self.__width) + '\\n'\n print(('\\n' * self.__y) + (row * self.__height), end=\"\")", "def draw(self):\n res = ''\n # ANSI code to clear the screen\n #res += chr(27) + \"[2J\"\n for position, value in enumerate(self.board.tttboard):\n if value is None:\n res += str(position)\n #sys.stdout.write(str(position))\n else:\n res += str(value)\n #sys.stdout.write(str(value))\n\n if (position + 1) % 3 != 0:\n res += str('|')\n #sys.stdout.write('|')\n else:\n #print ''\n\n res += str('\\n')\n if position == 2 or position == 5:\n #print '-' * 5\n\n res += '-' * 5\n res += str('\\n')\n return res", "def printScreen(screenRepr):\n for y, line in enumerate(screenRepr):\n for x, char in enumerate(line):\n if(\"part\" in char):\n MAP_SCREEN.addstr(y + 1, x + 1, \" \", curses.color_pair(10))#White body\n elif(\"head\" in char):\n MAP_SCREEN.addstr(y + 1, x + 1, \" \", curses.color_pair(int(char[4:])))\n else:\n MAP_SCREEN.addstr(y + 1, x + 1, char)\n MAP_SCREEN.refresh()", "def display(self):\n prow = self.__width * '#'\n nstr = self.y * \"\\n\"\n for x in range(self.__height):\n nstr += self.x * \" \"\n nstr += prow\n if x == (self.__height - 1):\n break\n nstr += \"\\n\"\n print(nstr)", "def display(self):\n for space in range(self.y):\n print('')\n for row in range(self.height):\n for x in range(self.x):\n print(' ', end='')\n for col in range(self.width):\n print('#', end='')\n print('')", "def display(self):\n mg_w = self.width\n mg_h = self.height\n str_to_prt = \"\\n\" * self.y + (\" \" * self.x + \"#\" * mg_w + '\\n') * mg_h\n print(str_to_prt[:-1])", "def Draw(self):\n print ( 10*\"*\")\n print (\"Player \" + self.character + \" says:\")\n print (\"It's a Draw\")\n print ( 10*\"*\")", "def print(self):\n board_string = ''\n for y in range(self.size):\n if y == 0:\n board_string += '+ '\n for x in range(self.size):\n board_string += str(x+1) + ' '\n board_string += '\\n'\n board_string += (1+3*self.size)*'-'\n board_string += '\\n'\n board_string += str(y+1)+'|'+y*' '\n \n for x in range(self.size):\n board_string += ' '\n if self.board[y,x] == HexBoard.BLUE:\n board_string += self.char_player1\n elif self.board[y,x] == HexBoard.RED:\n board_string += self.char_player2\n else: \n board_string += self.char_empty\n board_string += '\\n'\n board_string = board_string.strip()\n\n print(board_string)", "def print(self,text,position=None,size=None,color=mycolors.WHITE,font=None,conversion=True):\n if conversion:\n if not position: position=(0,0)\n if not size: size=1\n position=self.draw.plane.getToScreen(position,self.draw.window)\n ux,uy=self.draw.plane.units\n size=int(size*ux/50)\n else:\n if not position: position=(10,10)\n if not size: size=20\n self.window.print(text,position,size,color,font)", "def Print(self, s, color=(229, 153, 153, 255)):\r\n self.screen.blit(self.font.render(s, True, color), (5, self.textLine))\r\n self.textLine += 15", "def fill(self, x, y, width=None, height=None, char=None,\n fg=(255, 255, 255), bg=None):\n self.console.draw_rect(x, y, width, height, char, fg, bg)", "def PrintAt(self,x=0,y=0,text=''):\n self.SetCursor(x,y)\n self.Print(text)", "def print(self, my_screen, text_string):\n text_bitmap = self.font.render(text_string, True, BLACK)\n my_screen.blit(text_bitmap, [self.x_pos, self.y_pos])\n self.y_pos += self.line_height", "def display(self):\n for r in 
range(len(self.grid)):\n for c in range(len(self.grid[r])):\n if (r, c) == self.location:\n print('\\033[96m*\\x1b[0m', end=' ') # print a blue *\n else:\n print(self.grid[r][c], end=' ') # prints a space or wall\n print()\n print()", "def show( self):\n def symbol( i):\n return i<0 and (i==-2 and ' ' or '0') or chr(ord('a') + i)\n \n X, Y = np.max( self.board.positions, 0)\n # -2 to indicate outside board.\n display = np.zeros( (X+1,Y+1), dtype=int) - 2 \n for x, y in self.board.positions:\n display[x, y] = -1 # -1 to indicate unoccupied\n for p, i in self.occupation.items():\n x, y = self.board.positions[p]\n display[x, y] = i\n for x in xrange(X+1):\n s = ''.join( [ symbol( display[x, y]) for y in xrange(Y+1) ])\n print s", "def my_print(self):\n if self.size == 0:\n print(\"\")\n return\n for j in range(self.__position[1]):\n print(\"\")\n for i in range(self.size):\n if self.__position[0] > 0:\n print(\" \" * self.__position[0], end=\"\")\n print('#' * self.size)", "def draw(grid):\n\n for row in grid:\n for char in row:\n if char is None:\n sys.stdout.write(\" \")\n else:\n sys.stdout.write(char)\n sys.stdout.write(\"\\n\")", "def print(self):\r\n base = 8 * self.width\r\n print(base * \"-\")\r\n for x in range(self.height):\r\n output = \"\"\r\n for y in range(self.width):\r\n output = output + self.board[x][y] + \"|\"\r\n print(\"|\" + output)\r\n print(base * \"-\")", "def print_board(self):\n \n # How to show empty/p1/p2\n VALS = \".XO\"\n\n print(\"\\n a b c d e f g\")\n print(\" /--+-+-+-+-+-+--\\\\\")\n for r in range(_HEIGHT - 1, -1, -1):\n s = \"%s |\" % r\n for c in range(_WIDTH):\n # Print mark next to most recent move\n mark = \">\" if self.last_play_rc == (r, c) else \" \"\n s += mark + VALS[self.board[r * 7 + c]]\n print(s + \" |\")\n print(\" \\\\--+-+-+-+-+-+--/\")\n print(\" a b c d e f g\\n\")", "def my_print(self):\n if self.__size == 0:\n print()\n else:\n print(\"\\n\" * self.__position[1], end='')\n for x in range(self.__size):\n print(\" \" * self.__position[0], end='')\n print(\"#\" * self.__size)", "def my_print(self):\n if self.__size == 0:\n print(\"\")\n return\n [print(\"\") for x in range(0, self.__position[1])]\n for i in range(0, self.__size):\n [print(\" \", end=\"\") for i in range(0, self.__position[0])]\n [print(\"#\", end=\"\") for j in range(0, self.__size)]\n print(\"\")", "def printBoard(self):\n print(\"\"\"\nSpace 1 Space 2 Space 3 Space 4 Space 5 Space 6\n------- ------- ------- ------- ------- -------\"\"\")\n print(\"{:>4}{:>10}{:>10}{:>10}{:>10}{:>10}\".format(str(self.space1), str(self.space2), str(self.space3), str(self.space4), str(self.space5), str(self.space6)))\n print()", "def draw(self, screen, y, invert_color=False, fill_character=None):\n self.screen = screen\n string = self.generate_string()\n move(screen, y, 0)\n screen.clrtoeol()\n if fill_character:\n _, screen_cols = getmaxyx(screen)\n string = string.ljust(screen_cols, fill_character)\n if invert_color:\n screen.insstr(y, 0, string, curses.A_REVERSE)\n else:\n screen.insstr(y, 0, string)", "def my_print(self):\n if self.__size > 0:\n print(\"\\n\" * self.__position[1], end=\"\")\n for i in range(self.__size):\n print(\" \" * self.__position[0], end=\"\")\n print(\"#\" * self.__size)\n else:\n print()", "def my_print(self):\n length = self.__size\n\n if self.__size == 0:\n print(\"\")\n\n \"\"\"Print using position of y-axis.\"\"\"\n for i in range(self.__position[1]):\n print(\"\")\n for j in range(length):\n \"\"\"Print spaces and # in x-axis.\"\"\"\n print((\" \" * 
self.__position[0]) + (\"#\" * length))", "def display(self):\n for row in range(self.height):\n for col in range(self.width):\n char = '#' if self.pixels[row * self.width + col] else '.'\n print(char, end='')\n print()\n print()", "def print_rect(x, y, w, h, revision):\n disp_y = cy(y)\n disp_h = ch(h)\n if disp_h < 0:\n disp_y += disp_h\n disp_h = -disp_h\n \n print '<rect id=%s x=%s y=%s' % (qa(revision), qa(cx(x)), qa(disp_y),),\n print 'width=%s height=%s' % (qa(cw(w)), qa(disp_h),),\n print 'fill=\"white\"',\n print 'stroke=\"rgb(98%%,98%%,88%%)\" stroke-width=%s' % qa(line_width),\n print 'onmouseover=%s' % qa(\n \"var event = arguments[0] || window.event;\"\n \" if (event.shiftKey) {\"\n \" highlightRevision('\"+str(revision)+\"');\"\n \" return false;\"\n \" }\"),\n print ' />'", "def display(self):\n width = self.width\n height = self.height\n x = self.x\n y = self.y\n for d_y in range(y):\n print()\n for h in range(height):\n if x != 0:\n print(\" \" * x, end=\"\")\n print(\"#\" * width)", "def draw_box(stdscr, y, x, height, width, mode=0):\n if mode == 0:\n stdscr.addstr(y, x, \"┌\" + \"─\" * (width - 1) + \"┐\")\n stdscr.addstr(y + height, x, \"└\" + \"─\" * (width - 1) + \"┘\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"│\")\n stdscr.addstr(i, x + width, \"│\")\n if mode == 1:\n stdscr.addstr(y, x, \"╭\" + \"─\" * (width - 1) + \"╮\")\n stdscr.addstr(y + height, x, \"╰\" + \"─\" * (width - 1) + \"╯\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"│\")\n stdscr.addstr(i, x + width, \"│\")\n if mode == 2:\n stdscr.addstr(y, x, \"╔\" + \"═\" * (width - 1) + \"╗\")\n stdscr.addstr(y + height, x, \"╚\" + \"═\" * (width - 1) + \"╝\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"║\")\n stdscr.addstr(i, x + width, \"║\")", "def snakePrint():\n for snake in snake_pos: \n pg.draw.rect(game_disp, white, snake)", "def display(self):\n print(\"\\n\" * self.y, end='')\n for i in range(self.height):\n for j in range(self.width + self.x):\n if j < self.x:\n print(' ', end='')\n else:\n print('#', end='')\n print('')", "def my_print(self):\n if self.__size is not 0:\n for ite in range(self.__position[1]):\n print()\n for ite in range(self.__size):\n print(\" \" * self.__position[0], end=\"\")\n print(\"#\" * self.size)\n else:\n print()", "def display(field):\n side = int(math.sqrt(len(field))) # in number of elements (tiles)\n \n def pos():\n cy, cx = win.getyx()\n stdscr.addstr(0, 0, \"cy: \"+str(cy)+\", cx: \"+str(cx))\n\n def br():\n while True:\n c = stdscr.getch()\n if c == curses.KEY_RIGHT:\n break\n win.refresh()\n\n win.addstr(0, 0, '┏')\n for _ in range(side-1):\n win.addstr('━━━━━━')\n win.addstr('┳')\n win.addstr('━━━━━━')\n win.addstr('┓ ')\n\n for y in range(side):\n \n win.addstr('┃')\n for x in range(side):\n #stdscr.addstr(0, 0, \"side: \" + str(x))\n idx = y * side + x\n if field[idx] == 0:\n win.addstr(' '.center(6))\n else:\n n = field[idx]\n color = curses.color_pair(0)\n if n < 0:\n field[idx] *= -1\n n = field[idx]\n color = curses.A_BOLD | curses.A_STANDOUT\n elif n == 4:\n color = curses.color_pair(3)\n elif n == 8:\n color = curses.color_pair(4)\n elif n >= 16:\n color = curses.color_pair(1)\n \n #win.addstr(str(n).center(6), color)\n \n n = str(n)\n left = (6-len(n)) // 2\n right = 6 - (left + len(n))\n win.addstr(left*' ')\n win.addstr(n, color)\n win.addstr(right*' ')\n\n \n win.addstr('┃')\n win.addstr(' ')\n if y == side-1:\n break\n else: \n win.addstr('┣')\n for _ in range(side-1):\n win.addstr('━━━━━━')\n 
win.addstr('╋')\n win.addstr('━━━━━━')\n win.addstr('┫ ')\n \n win.addstr('┗')\n for _ in range(side-1):\n win.addstr('━━━━━━')\n win.addstr('┻')\n win.addstr('━━━━━━')\n #pos()\n #br()\n win.addstr('┛')\n\n #win.border()\n win.refresh()", "def print_space(self,text,width,w=1,h=1):\n texlen = len(text)\n if texlen > width:\n text = text[:width]\n self.lesprint(text,width)", "def draw(self, x, y, char=None, fg=(255, 255, 255), bg=None):\n self.console.draw_char(x, y, char, fg, bg)", "def display(self):\n for i in range(self.y):\n print()\n for i in range(self.height):\n for k in range(self.x):\n print(' ', end='')\n for j in range(self.width):\n print('#', end='')\n print()", "def screen_update(fill_blank=True):\n clear_screen()\n\n if g.content:\n xprint(g.content)\n\n if g.message or g.rprompt:\n length = c.charcount\n out = g.message or ''\n blanks = getxy().width - length(out) - length(g.rprompt or '') - 3\n out += ' ' * blanks + (g.rprompt or '')\n xprint(out)\n\n elif fill_blank:\n xprint(\"\")\n\n g.message = g.content = g.rprompt = False", "def my_print(self):\n if self.__size > 0:\n for k in range(self.__position[1]):\n print()\n for i in range(self.__size):\n for j in range(self.__position[0]):\n print(\" \", end='')\n print(\"#\" * self.__size)\n else:\n print()", "def display(self):\n for b in range(self.y):\n print()\n for i in range(self.height):\n print(\" \" * self.x + \"#\" * self.width)", "def display(self):\n print(\"\\n\" * self.y, end=\"\")\n for i in range(self.height):\n print(\" \" * self.x, end=\"\")\n for j in range(self.width):\n print(\"#\", end=\"\")\n print()", "def display_c(c, font, screen, lcd, size=5, x=0, y=0):\n char = font[str(c)]\n width, height = char.size\n \"\"\"\n if not(size == 10):\n size /= 10.0\n width = int(round(size*width))\n height = int(round(size*height))\n char.resize((width,height))\n \"\"\"\n size = int(round(size * 10))\n images.display_img(char,screen,lcd,size,x,y)", "def display(self):\n print('\\n' * (self.__y), end='')\n for point in range(self.__height):\n print(' ' * self.__x, end='')\n for point in range(self.__width - 1):\n # print(' ' * self.__x, end='')\n print('#', end='')\n print('#')", "def graphic(board, player1=1, player2=2):\n width = board.width\n height = board.height\n\n print()\n print(player1, \"with X\".rjust(3))\n print(player2, \"with O\".rjust(3))\n print(' ', end='')\n for x in range(width):\n print(\"{0:4}\".format(x), end='')\n print()\n for i in range(height - 1, -1, -1):\n print(\"{0:4d}\".format(i), end='')\n for j in range(width):\n loc = i * width + j\n p = board.states.get(loc, -1)\n if p == player1:\n print('X'.center(4), end='')\n elif p == player2:\n print('O'.center(4), end='')\n else:\n print('_'.center(4), end='')\n print()", "def display(self):\n print(\"\\n\" * self.__y, end='')\n for row in range(self.__height):\n if self.__x:\n print(\" \" * (self.__x), end='')\n if self.__width:\n print(\"#\" * self.__width)", "def display(self):\n for row0 in range(self.y):\n print()\n for row in range(self.height):\n for column0 in range(self.x):\n print(\" \", end=\"\")\n for column in range(self.width):\n print(\"#\", end=\"\")\n print()", "def print(self):\n for i in range(self.height):\n print(\"--\" * self.width + \"-\")\n for j in range(self.width):\n if self.board[i][j]:\n print(\"|X\", end=\"\")\n else:\n print(\"| \", end=\"\")\n print(\"|\")\n print(\"--\" * self.width + \"-\")", "def print(self):\n for i in range(self.height):\n print(\"--\" * self.width + \"-\")\n for j in range(self.width):\n 
if self.board[i][j]:\n print(\"|X\", end=\"\")\n else:\n print(\"| \", end=\"\")\n print(\"|\")\n print(\"--\" * self.width + \"-\")", "def print(self):\n for i in range(self.height):\n print(\"--\" * self.width + \"-\")\n for j in range(self.width):\n if self.board[i][j]:\n print(\"|X\", end=\"\")\n else:\n print(\"| \", end=\"\")\n print(\"|\")\n print(\"--\" * self.width + \"-\")", "def print(self):\n for i in range(self.height):\n print(\"--\" * self.width + \"-\")\n for j in range(self.width):\n if self.board[i][j]:\n print(\"|X\", end=\"\")\n else:\n print(\"| \", end=\"\")\n print(\"|\")\n print(\"--\" * self.width + \"-\")", "def print(self):\n for i in range(self.height):\n print(\"--\" * self.width + \"-\")\n for j in range(self.width):\n if self.board[i][j]:\n print(\"|X\", end=\"\")\n else:\n print(\"| \", end=\"\")\n print(\"|\")\n print(\"--\" * self.width + \"-\")", "def print(self):\n for i in range(self.height):\n print(\"--\" * self.width + \"-\")\n for j in range(self.width):\n if self.board[i][j]:\n print(\"|X\", end=\"\")\n else:\n print(\"| \", end=\"\")\n print(\"|\")\n print(\"--\" * self.width + \"-\")", "def display(self):\n print(\"\\n\" * self.__y, end=\"\")\n for i in range(self.__height):\n print(\" \" * self.__x, end=\"\")\n print(\"#\" * self.__width, end=\"\")\n print()", "def fill():\n print('#', end='')", "def display(board):\n for i in range(height-1, -1, -1):\n print(' '.join(['O' if at(board, i * width + j) else '-' for j in range(1, width+1)]))\n print(\"\")", "def draw_board(self):\n print(\"\\n\" * 10)\n print(\"-PRINTING BOARD-\")\n for row in self.grid:\n for column in row:\n print(column.character(), end=\"\")\n print() # to create a new line", "def draw(self):\n # 5 is the number of characters per box add one for the header column\n sepreator_line = \"-\" * (len(self.letters) + 1) * 5 + \"-\"\n print(sepreator_line)\n print(\n \"| \" + \"\".join([f\"| {letter} \" for letter in self.letters]) + \"|\")\n print(sepreator_line)\n for number in self.numbers:\n print(f\"| {number} \" + \"\".join(\n [f\"| {self.positions[letter + number]} \" for letter in self.letters]) + \"|\")\n print(sepreator_line)", "def print(self, assignment):\n letters = self.letter_grid(assignment)\n for i in range(self.crossword.height):\n for j in range(self.crossword.width):\n if self.crossword.structure[i][j]:\n print(letters[i][j] or \" \", end=\"\")\n else:\n print(\"█\", end=\"\")\n print()", "def print(self, assignment):\n letters = self.letter_grid(assignment)\n for i in range(self.crossword.height):\n for j in range(self.crossword.width):\n if self.crossword.structure[i][j]:\n print(letters[i][j] or \" \", end=\"\")\n else:\n print(\"█\", end=\"\")\n print()", "def print(self, assignment):\n letters = self.letter_grid(assignment)\n for i in range(self.crossword.height):\n for j in range(self.crossword.width):\n if self.crossword.structure[i][j]:\n print(letters[i][j] or \" \", end=\"\")\n else:\n print(\"█\", end=\"\")\n print()", "def print_text(TINY_FONT, x, y, text, color = white):\n text_image = TINY_FONT.render(text, True, color)\n gameDisplay.blit(text_image, (x,y))", "def draw(self):\n\t\tfor i in range(0, self.size):\n\t\t\tprint('\\n' + \"----\" * self.size)\n\t\t\tfor j in range(0, self.size):\n\t\t\t\tprint(self.grid[i][j] + ' |', end=\" \")\n\t\tprint('\\n'+ \"----\" * self.size + '\\n')", "def display():\n screen.addch(head[0],head[1],'x')", "def print_board(self):\n print_sp = functools.partial(print, end=' ')\n print_sp(' ')\n for i in 
range(BOARD_SIZE):\n print_sp(i)\n print()\n for i in range(BOARD_SIZE):\n print_sp(i)\n for j in range(BOARD_SIZE):\n e = self.board[j][i]\n print_sp('●') if e == BLACK else print_sp('○') if e == WHITE else print_sp('·')\n print()", "def print(self):\n \n for i in range(self.height):\n print(\"--\" * self.width + \"-\")\n \n for j in range(self.width):\n \n if self.board[i][j]:\n print(\"|X\", end=\"\")\n \n else:\n print(\"| \", end=\"\")\n print(\"|\")\n \n print(\"--\" * self.width + \"-\")", "def display(self):\n [print() for i in range(self.__y)]\n for i in range(self.__height):\n [print(\" \", end=\"\") for i in range(self.__x)]\n for j in range(self.__width):\n print(\"#\", end=\"\")\n print()", "def display_playground(self, win, rock, paper, scissor):\n pygame.draw.rect(win, white, (0, screen_height // 4, screen_width, screen_height // 2))\n pygame.draw.rect(win, white, (230, 50, 50, 10))\n rock.draw(win, black)\n paper.draw(win, black)\n scissor.draw(win, black)", "def draw(self, screen):", "def display(self):\n stroke(51)\n fill(self.couleur)\n rect(self.pos_x, 0, self.largeur, self.min_y)\n rect(self.pos_x, self.min_y + self.hauteur, self.largeur, util.SCREEN_Y-(self.min_y + self.hauteur))", "def _drawstatus(self):\n (y, x) = self.chatscreen.getmaxyx()\n\n fillchar = '*' if self.busy > 0 else '-'\n form = '{:'+ fillchar +'^' + str(x - 1) + '}'\n\n self.chatscreen.addstr(y-1, 0, form.format('%s' % self.status()))", "def display(self):\n for i in range(self.__y):\n print()\n for i in range(self.__height):\n print(\" \" * self.__x + \"#\" * self.__width)", "def __str__(self):\n if self.__size != 0:\n [print(\"\") for i in range(0, self.__position[1])]\n for i in range(0, self.__size):\n [print(\" \", end=\"\") for j in range(0, self.__position[0])]\n [print(\"#\", end=\"\") for k in range(0, self.__size)]\n if i != self.__size - 1:\n print(\"\")\n return (\"\")", "def printBoard(self):\n\t\tkey = [' ', 'X', 'O']\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[0][0]] + ' | ' + key[self.state[0][1]] + ' | ' + key[self.state[0][2]])\n\t\tprint(' | |')\n\t\tprint('-----------')\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[1][0]] + ' | ' + key[self.state[1][1]] + ' | ' + key[self.state[1][2]])\n\t\tprint(' | |')\n\t\tprint('-----------')\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[2][0]] + ' | ' + key[self.state[2][1]] + ' | ' + key[self.state[2][2]])\n\t\tprint(' | |')", "def outline(self, x, y, width=None, height=None, char=None,\n fg=(255, 255, 255), bg=None):\n self.console.draw_frame(x, y, width, height, char, fg, bg)", "def drawRectangle(x, y, width, height):\n pen1.up()\n pen1.goto(x, y)\n pen1.down()\n pen1.fd(width)\n pen1.right(90)\n pen1.fd(height)\n pen1.right(90)\n pen1.fd(width)\n pen1.right(90)\n pen1.fd(height)", "def display(self): \n print ' ' \n print 'Connect ', NWIN, ' Board '\n print ' ' \n for r in reversed(range(self.getHeight())):\n for c in range(self.getWidth()):\n if self.cell[c][r] == BLACK:\n print '+',\n elif self.cell[c][r] == WHITE:\n print '-',\n else:\n print '.',\n print ' '\n for c in range(self.getWidth()):\n print c,\n print ' '\n print ' '", "def print_room(room):\r\n\r\n for row in room:\r\n for cell in row:\r\n if cell == \"obstacle\":\r\n print(\"O\", end=\"\")\r\n elif cell == \"robot\":\r\n print(\"R\", end=\"\")\r\n elif cell == \"empty\":\r\n print(\" \", end=\"\")\r\n elif cell == \"dirt\":\r\n print(\"*\", end=\"\")\r\n\r\n print()", "def print_game_over():\n print()\n print(\" _____ __ __ ______ ______ ________ _____ 
\")\n print(r\" / ____| /\\ | \\/ | ____| / __ \\ \\ / / ____| __ \\ \")\n print(r\" | | __ / \\ | \\ / | |__ | | | \\ \\ / /| |__ | |__) |\")\n print(r\" | | |_ | / /\\ \\ | |\\/| | __| | | | |\\ \\/ / | __| | _ / \")\n print(r\" | |__| |/ ____ \\| | | | |____ | |__| | \\ / | |____| | \\ \\ \")\n print(r\" \\_____/_/ \\_\\_| |_|______| \\____/ \\/ |______|_| \\_\\\\\")\n print()", "def ascii_to_screen(self, y, text, color=1, width=0):\n text_height = len(text)\n for line_index in range(len(text)):\n text_width = len(text[line_index]) if not width else width\n self._screen.hline(y+line_index, 0, ord(' '), self._dims[1])\n if self._dims[0] > text_height and self._dims[1] > text_width:\n self._screen.addstr(y+line_index, (self._dims[1]/2)-(text_width/2), text[line_index], curses.A_BOLD|curses.color_pair(color))\n self._screen.refresh()", "def render(self):\n for r in range(self.y_size):\n line = ''\n for c in range(self.x_size):\n glyph = self.MAP_GLYPH_TABLE[self.grid_data[r][c]]\n\n # overwrite with player\n if r == self.player_y and c == self.player_x:\n glyph = self.PLAYER_GLYPH_TABLE[self.player_heading]\n\n line += glyph\n print(line)\n\n print('\\n' * (20 - self.y_size))", "def printBoard(self):", "def display(self):\n\n #player UI\n s = \" \"\n for p in range(WIDTH):\n s += str(p)\n s += \" \"\n\n print(s)\n\n for row in range(HEIGHT):\n\n # player UI\n print(row, end=' ')\n\n for col in range(WIDTH):\n\n if self.board[row][col] == 1:\n print(\"X\", end=' ')\n elif self.board[row][col] == 2:\n print(\"O\", end=' ')\n else:\n print(\"-\", end=' ')\n print()", "def draw_s(self):\r\n pen.down()\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(20)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(20)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(50)", "def printSpace(x0,y0):\n\t\t\tprint\n\n\t\t\t# keep track of the island ids\n\t\t\tids = set()\n\n\t\t\t# convert id to a letter\n\t\t\tdef idChr(_id):\n\t\t\t\treturn chr(_id+97)\n\n\t\t\t# print the map\n\t\t\ti = 0\n\t\t\tfor y in xrange(h):\n\t\t\t\tfor x in xrange(w):\n\t\t\t\t\tif (x,y) == (x0,y0):\n\t\t\t\t\t\t# show current location\n\t\t\t\t\t\tprint \"O\",\n\t\t\t\t\telse:\n\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t# show island label if available\n\t\t\t\t\t\t\t_id = findIslands.pixelToIslandId[i]\n\t\t\t\t\t\t\tids.add(_id)\n\t\t\t\t\t\t\tprint idChr(_id),\n\t\t\t\t\t\texcept KeyError:\n\t\t\t\t\t\t\t# just display character if no island info available\n\t\t\t\t\t\t\tprint space[i],\n\t\t\t\t\ti += 1\n\t\t\t\tprint\n\n\t\t\t# print \n\t\t\tprint \"island -> parent -> global owner\"\n\t\t\tfor _id in ids:\n\t\t\t\tprint \"%s -> %s -> %s\" % (\n\t\t\t\t\tidChr(_id),\n\t\t\t\t\tidChr(findIslands.islandIdToParent[_id]._id),\n\t\t\t\t\tidChr(findIslands.getIslandOwner(_id)._id))", "def drawString(text: str):\n pass", "def display_character(window, name, path_template):\n # Could be improved a lot.\n border_size = 20\n path = \".\".join((path_template, \"200\", \"png\"))\n pic = pygame.image.load(path)\n pic_w, pic_h = pic.get_size()\n text = ft_title.render(\" \".join((\"<-\", name, \"->\")), 1, WHITE)\n text_w, text_h = text.get_size()\n pygame.draw.rect(window, GREY, (SCREEN_W/2 - pic_w/2 - border_size,\n SCREEN_H/2 - pic_h/2 - text_h - border_size,\n pic_w + border_size*2, pic_h + border_size*2),\n border_size)\n window.blit(pic, (SCREEN_W/2 - pic_w/2, SCREEN_H/2 - pic_h/2 - text_h))\n window.blit(text, (SCREEN_W/2 - 
text_w/2, SCREEN_H/2 + pic_h/2 - text_h/2))", "def display(self):\n \"\"\" Coordinates for position are x-axis (LR) and y-axis (NS) \"\"\"\n for coordY in range(self.y):\n print()\n for column in range(self.height):\n for coordLR in range(self.x):\n print(\" \", end=\"\")\n for row in range(self.width):\n print(\"#\", end=\"\")\n print()", "def display(self):\n\n print(\"\\n\" * self.__y, end='') # y offset\n\n for i in range(self.__height):\n print(\" \" * self.__x, end='') # x offset\n print(\"#\" * self.__width)", "def graphic(self, board, player1, player2):\n width = board.width\n height = board.height\n\n print(\"Player\", player1, \"with X\".rjust(3))\n print(\"Player\", player2, \"with O\".rjust(3))\n print()\n for x in range(width):\n print(\"{0:8}\".format(x), end='')\n print('\\r\\n')\n for i in range(height - 1, -1, -1):\n print(\"{0:4d}\".format(i), end='')\n for j in range(width):\n p = board.board_value[i][j]\n if p == player1:\n print('X'.center(8), end='')\n elif p == player2:\n print('O'.center(8), end='')\n else:\n print('_'.center(8), end='')\n print('\\r\\n\\r\\n')", "def print(self, board: Board):\n # Render first horizontal alphabetical x-axis markers\n row = [\" \"]\n\n for x_marker in self.coordinate_map:\n row.append(\" \" + x_marker)\n\n print(\"\".join(row))\n\n # Render the rest of the cheese board\n for y, y_row in enumerate(self.map):\n # Render left side row numbers\n row = [str((8-y)) + \" \"]\n\n # Render battlefield\n for x, square in enumerate(y_row):\n # Check with Board if there is a piece on this coordinate\n anybody = board.who_is_in(*[x, y])\n\n # Anybody out there?\n if anybody is not None:\n # Oh hai\n row.append(anybody.name)\n else:\n # Print a simple dot\n row.append(\" .\")\n\n # Print the entire row\n print(\"\".join(row))", "def showBoard(self):\n \n brd = \"\\n | | \\n\" + \\\n \" \" + self.squares[0] + \" | \" + self.squares[1] + \" | \" + self.squares[2] + \" \\n\" + \\\n \"___|___|___\\n\" + \\\n \" | | \\n\" + \\\n \" \" + self.squares[3] + \" | \" + self.squares[4] + \" | \" + self.squares[5] + \" \\n\" + \\\n \"___|___|___\\n\" + \\\n \" | | \\n\" + \\\n \" \" + self.squares[6] + \" | \" + self.squares[7] + \" | \" + self.squares[8] + \" \\n\" + \\\n \" | | \\n\"\n\n return brd", "def write(self, x, y, msg, fg=(255, 255, 255), bg=None):\n self.console.draw_str(x, y, msg, fg, bg)", "def graphic(self, board, player1, player2):\n width = board.width\n height = board.height\n\n print(\"Player\", player1, \"with X\".rjust(3))\n print(\"Player\", player2, \"with O\".rjust(3))\n print()\n for x in range(width):\n print(\"{0:8}\".format(x), end='')\n print('\\r\\n')\n for i in range(height - 1, -1, -1):\n print(\"{0:4d}\".format(i), end='')\n for j in range(width):\n loc = i * width + j\n p = board.state.get(loc, 0)\n if p == player1:\n print('X'.center(8), end='')\n elif p == player2:\n print('O'.center(8), end='')\n else:\n print('_'.center(8), end='')\n print('\\r\\n\\r\\n')", "def Render(board):\r\n print(\" 0 1 2\")\r\n for i in range(0, 3):\r\n spot1 = \" \" if board[i][0] is None else str(board[i][0])\r\n spot2 = \" \" if board[i][1] is None else str(board[i][1])\r\n spot3 = \" \" if board[i][2] is None else str(board[i][2])\r\n print(f\"{i} |{spot1} {spot2} {spot3}|\")", "def drawRectangle(width, height, tilt, penColor, fillColor):\n Lucia.color(penColor,fillColor)\n Lucia.seth(tilt)\n Lucia.begin_fill()\n for i in range(2):\n Lucia.forward(width)\n Lucia.left(90)\n Lucia.forward(height)\n Lucia.left(90)\n Lucia.end_fill()", "def 
curses_print_map(self):\n map_window = self.stdscreen.subwin(5,5)\n map_keypad = map_window.keypad(1)\n map_panel = panel.new_panel(map_window)\n\n map_panel.update_panels()\n map_panel.top()\n map_panel.show()\n map_window.clear()\n\n x = 0; y=0; z=0\n\n # Print map phase\n draw_map(self,[x,y,z])\n\n def draw_map(game,loc):\n grid = game.world.grid\n\n z = loc[2] # Load the current floor (z)\n\n for x in range(game.conf.x_dim):\n for y in range(game.conf.y_dim):\n # Draw a map here!\n pass", "def display(self):\n print('')\n print(\" ---------------------------------\")\n counter = 0\n for row in self.positions:\n counter += 1\n line = f'{counter}: |'\n for space in row:\n if isinstance(space, str):\n line += f' {space} |'\n else:\n starter = ' '\n ender = '|'\n if space.team == 'white':\n piece = stylize(space.symbol+' ', colored.fg(\"light_blue\"))\n else:\n piece = stylize(space.symbol+' ', colored.fg(\"light_red\"))\n line += starter+piece+ender\n print(line)\n print(\" ---------------------------------\")\n print(\" | A | B | C | D | E | F | G | H |\\n\")", "def print_board(self):\n to_join = [\"-\" * self.DIMENSIONS[0]]\n for row in self.grid:\n to_join.append(\"\".join([ch.letter if ch is not None else \" \" for ch in row]))\n\n print(\"\\n\".join(to_join))", "def print(self) -> str:\n if self.is_unoccupied():\n return \"\"\n return str(\"%s-%s\" % (self.piece.color.name, self.piece.name.name))", "def print( self, str, pos=None ):\n\t\tif pos:\n\t\t\tself.set_cursor( pos )\n\t\tself.write( str.encode(\"ASCII\") )" ]
[ "0.77359396", "0.73379", "0.72519505", "0.7151739", "0.6960012", "0.69556326", "0.6913779", "0.68737376", "0.6843139", "0.68156284", "0.6770897", "0.67551607", "0.6753198", "0.67503697", "0.67081517", "0.6704995", "0.66788816", "0.6676752", "0.6670293", "0.6664551", "0.66513497", "0.66512847", "0.6650872", "0.66501623", "0.66452307", "0.6594629", "0.65933496", "0.6589184", "0.6584753", "0.6567997", "0.6547536", "0.6537938", "0.652542", "0.6516106", "0.6513714", "0.65082395", "0.6486085", "0.6474186", "0.64734167", "0.6466876", "0.6463731", "0.64596206", "0.64541465", "0.6452925", "0.64520806", "0.64505225", "0.64309245", "0.64309245", "0.64309245", "0.64309245", "0.64309245", "0.64309245", "0.6422265", "0.6421695", "0.6414916", "0.64096737", "0.640252", "0.6396343", "0.6396343", "0.6396343", "0.63956183", "0.6384697", "0.63809884", "0.6375996", "0.6372488", "0.6372019", "0.6342599", "0.63422096", "0.63412696", "0.63394976", "0.6334542", "0.63256204", "0.6314894", "0.6312721", "0.6310623", "0.63011897", "0.6299722", "0.62948453", "0.6293149", "0.6291672", "0.62820303", "0.62815917", "0.626721", "0.62538743", "0.6245728", "0.6244378", "0.6242534", "0.62389654", "0.62272316", "0.6221988", "0.6211241", "0.62038004", "0.6197778", "0.61963767", "0.61886036", "0.6180609", "0.61695665", "0.61656797", "0.6154744", "0.6145053" ]
0.6455042
42
Method that converts a Python object into a string. Returns the object's string representation.
def __str__(self) -> str:
    return "[Rectangle] ({}) {}/{} - {}/{}".format(
        self.id, self.x, self.y, self.width, self.height)
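A minimal sketch showing the __str__ above in use (not from the dataset); the Rectangle wrapper here is hypothetical and only holds the attributes the format string reads.

class Rectangle:
    def __init__(self, id, width, height, x=0, y=0):
        self.id = id
        self.width = width
        self.height = height
        self.x = x
        self.y = y

    def __str__(self) -> str:
        # "[Rectangle] (id) x/y - width/height"
        return "[Rectangle] ({}) {}/{} - {}/{}".format(
            self.id, self.x, self.y, self.width, self.height)

print(Rectangle(1, 4, 2, x=3, y=5))   # [Rectangle] (1) 3/5 - 4/2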
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def str_(object_):\n return str(object_)", "def objectToString(obj):\n if (hasattr(obj, \"__iter__\")):\n # matrix or vector\n if len(obj) == 0:\n return \"\"\n else:\n if (hasattr(obj[0], \"__iter__\")):\n # matrix\n return matrixToString(obj)\n else:\n # vector\n return tupleToString(obj)\n elif hasattr(obj, 'name'):\n return obj.name\n else:\n return str(obj)", "def asString(obj):\n if type(obj) in _STR_TYPES:\n return obj\n return str(obj)", "def stringify(obj):\n tp = type(obj)\n if issubclass(tp, basestring):\n return obj\n elif hasattr(tp, '__unicode__'):\n s = tp.__unicode__(obj)\n if not isinstance(s, basestring):\n raise TypeError('__unicode__ did not return a string')\n return s\n elif hasattr(tp, '__str__'):\n s = tp.__str__(obj)\n if not isinstance(s, basestring):\n raise TypeError('__str__ did not return a string')\n return s\n else:\n return str(obj)", "def sstr(obj):\n if IS_PY2:\n # For lists and tuples in python2, remove unicode string representation characters.\n # i.e. ensure lists are printed as ['a', 'b'] and not [u'a', u'b']\n if type(obj) in [list]:\n return [sstr(item) for item in obj] # pragma: no cover # noqa\n elif type(obj) in [tuple]:\n return tuple(sstr(item) for item in obj) # pragma: no cover # noqa\n\n return unicode(obj).encode(DEFAULT_ENCODING) # pragma: no cover # noqa\n else:\n return obj # pragma: no cover", "def _tostr(obj): # pragma: no cover\n return obj if isinstance(obj, str) else obj.decode()", "def stringify(obj):\n tp = type(obj)\n if issubclass(tp, basestring):\n return obj\n elif hasattr(tp, '__unicode__'):\n s = tp.__unicode__(obj)\n if not isinstance(s, basestring):\n raise TypeError, '__unicode__ did not return a string'\n return s\n elif hasattr(tp, '__str__'):\n s = tp.__str__(obj)\n if not isinstance(s, basestring):\n raise TypeError, '__str__ did not return a string'\n return s\n else:\n return str(obj)", "def obj_to_str(obj):\n try:\n # Automatically return repr() for a certain types to avoid going through the whole function\n # and then returning the repr() right at the end.\n repr_types = (str, astropy.io.fits.header.Header)\n # Ensure FITS headers are just treated as string representations rather than iterables\n except NameError:\n repr_types = str\n if isinstance(obj, repr_types):\n # Default cases\n return repr(obj)\n\n try:\n # Convert to give same string for 1, 1.0, 1.0000 etc.\n return repr(float(obj))\n except (TypeError, ValueError):\n pass\n\n if isinstance(obj, dict):\n # Normalise dict order by sorting\n obj = sorted([obj_to_str(k) + ': ' + obj_to_str(v) for k, v in obj.items()])\n return '{' + ', '.join(obj) + '}'\n\n if isinstance(obj, set):\n # Normalise set order by sorting, then treat as standard iterable\n obj = list(sorted(obj))\n\n try:\n try:\n # Try short circuit for all numeric case for performance reasons (this doesn't change\n # the output string, but can avoid repeated calling of obj_to_str() for e.g. 
numpy\n # arrays).\n return repr(list(map(float, obj)))\n except (TypeError, ValueError):\n pass\n # Treat all iterables the same and normalise contents\n return '[' + ', '.join([obj_to_str(x) for x in obj]) + ']'\n\n except TypeError:\n pass\n\n # Fallback to \"official\" string representation of object\n return repr(obj)", "def get_str(self, obj):\n if self.pretty:\n return pprint.pformat(obj)\n else:\n return str(obj)", "def dump_string(obj: object) -> str:\n return dump_bytes(obj).decode(encoding=\"UTF-8\")", "def _stringify(obj):\r\n if isinstance(obj, unicode):\r\n return obj.encode('utf-8')\r\n elif isinstance(obj, str):\r\n return obj\r\n else:\r\n raise TypeError('Object is not a string.')", "def to_str(self) -> str:", "def ToString():\n @pass_failures\n def to_string(data):\n value = data.value\n if isinstance(value, Mapping):\n value = {k: str(v) for k, v in value.items()}\n else:\n value = str(value)\n data.value = value\n return data\n return to_string", "def value_to_string(self, obj):\n value = self.value_from_object(obj)\n return value", "def u(obj):\n return obj if isinstance(obj, str) else str(obj)", "def astr(obj):\n\treturn unicode(obj).encode(\"ascii\", \"replace\")", "def to_string(self):\r\n return self.__str__()", "def toString():", "def ustr(obj):\n if IS_PY2:\n # If we are getting a string, then do an explicit decode\n # else, just call the unicode method of the object\n if type(obj) in [str, basestring]: # pragma: no cover # noqa\n return unicode(obj, DEFAULT_ENCODING) # pragma: no cover # noqa\n else:\n return unicode(obj) # pragma: no cover # noqa\n else:\n if type(obj) in [bytes]:\n return obj.decode(DEFAULT_ENCODING)\n else:\n return str(obj)", "def __str__(self):\n return str(self.obj)", "def srepr(obj):\n return repr(str(obj))", "def __str__(self):\n return str(self.__dict__['_obj'])", "def _object2string(self,param_name,obj,replace=True):\n self.debug(\"object2string(%s,%s)\"%(param_name,obj))\n translator = self.translators[param_name]\n\n if not replace:\n translator=copy.copy(translator)\n\n return translator.object2string(obj)", "def repr_(object_):\n return repr(object_)", "def value_to_string(self, obj):\n value = self._get_val_from_obj(obj)\n return self.get_prep_value(value)", "def value_to_string(self, obj):\n value = self._get_val_from_obj(obj)\n return self.get_prep_value(value)", "def value_to_string(self, obj):\n value = self._get_val_from_obj(obj)\n return self.get_prep_value(value)", "def _to_str(obj: object) -> str:\n if obj is Ellipsis:\n return '...'\n elif isinstance(obj, type) and not isinstance(obj, _GENERIC_ALIAS_TYPE):\n if obj.__module__ == 'builtins':\n return obj.__qualname__\n else:\n return f'{obj.__module__}.{obj.__qualname__}'\n else:\n return repr(obj)", "def safestr(obj, encoding='utf-8'):\n if isinstance(obj, unicode):\n return obj.encode(encoding)\n elif isinstance(obj, str):\n return obj\n elif hasattr(obj, 'next') and hasattr(obj, '__iter__'): # iterator\n return itertools.imap(safestr, obj)\n else:\n return str(obj)", "def ToString(self):\r\n pass", "def ToString(self):\r\n pass", "def _tostr(t):\n\treturn t.__unicode__()", "def Str4R(obj):\n # for objects known by PypeR\n if type(obj) in str_func:\n return(str_func[type(obj)](obj))\n # for objects derived from basic data types\n for tp in base_tps:\n if isinstance(obj, tp):\n return(str_func[tp](obj))\n # for any other objects\n return(OtherStr(obj))", "def to_string(obj):\n if isinstance(obj, dict):\n str_obj = '{'\n for key, value in obj.items():\n str_obj += 
Parser.parse_text(key)+': '+Parser.parse_text(value)+'\\n'\n return str_obj + '\\b}'\n else:\n return Parser.parse_text(obj)", "def as_str(self):\n return self.as_type(str)", "def to_string(self):\n return json.dumps(self.to_json(), cls=ObjectEncoder)", "def safeToString():", "def __str__(self):\n buf = StringIO()\n self.write_to(buf)\n return buf.getvalue()", "def __str__(self):\n return bytes_to_str(bytes(self))", "def __str__(self) -> str:\n return str(self.getvalue())", "def __str__(self) -> str:\n return str(self.getvalue())", "def __str__(self) -> str:\n return str(self.getvalue())", "def asString(self):\n\n res = []\n for v in list(self.vars.values()):\n res.append(v.asString())\n res.append('')\n for e in list(self.enums.values()):\n res.append(e.asString())\n res.append('')\n for s in list(self.structs.values()):\n res.append(s.defAsString())\n res.append('')\n for s in list(self.structs.values()):\n res.append(s.dataAsString())\n\n return '\\n'.join(res)", "def __str__(self):\n return str(self.__s)", "def toString(self) -> unicode:\n ...", "def toString(self) -> unicode:\n ...", "def serialize_str(self, obj):\n if len(obj) < 0x100:\n return 'U' + struct.pack('<B', len(obj)) + obj\n return 'T' + struct.pack('<I', len(obj)) + obj", "def convert_to_builtin_type(obj):\n\n return str(obj)", "def get_result(self, obj):\n return str(obj)", "def get_result(self, obj):\n return str(obj)", "def serialize_to_python(cls, value):\n return repr(value)", "def dump_object(self, value):\n t = type(value)\n if t is int or t is long:\n return str(value)\n return '!' + pickle.dumps(value)", "def _convert_construction_info_to_string(obj):\n if not hasattr(obj, '_constructor_args'):\n raise AttributeError('obj has no attribute _constructor_args.')\n import StringIO\n output = StringIO.StringIO()\n info = {}\n info['_module_name'] = obj.__class__.__module__\n info['_class_name'] = obj.__class__.__name__\n encoded_constructor_args = {}\n for k, v in obj._constructor_args.items():\n if isinstance(v, chainer.Link):\n encoded_constructor_args[k] \\\n = _convert_construction_info_to_string(v)\n elif isinstance(v, six.string_types):\n encoded_constructor_args[k] = 'STR' + v\n else:\n encoded_constructor_args[k] = v\n info['_encoded_constructor_args'] = encoded_constructor_args\n numpy.save(output, arr=info)\n encoded_construction_info = 'OBJ' + output.getvalue()\n output.close()\n return encoded_construction_info", "def _to_string(self, data=None):\n if not data:\n raise ValueError(\"Please provide a correct data structure.\")\n\n if isinstance(data, dict):\n return str(json.dumps(data))\n elif isinstance(data, list):\n return ' '.join(data)\n else:\n return data", "def __str__(self):\n return str(self.serialize())", "def _transform(obj):\n\n if isinstance(obj, date) or isinstance(obj, time) or isinstance(obj, datetime):\n return str(obj)\n if isinstance(obj, decimal):\n return str(float(obj))\n if obj == None: \n return 'null'\n return str(obj)", "def toString(self) -> str:\n raise NotImplementedError", "def _convertListToString(self, list_of_objects):\n return (';').join(list_of_objects)", "def __str__(self):\n\n if compat.PY3:\n return self.__unicode__()\n return self.__bytes__()", "def serialize(self, obj):\n return dill.dumps(obj, 0).decode('latin-1')", "def __str__(self):\n return bytes_to_string(self._bytes)", "def stringReco(obj):\n name = obj.get_name()\n name = obj._pid if (name is None) else name\n return (\"pdg: \" + name + \" E: \" + str(obj._E)\n + \" px: \" + str(obj._px) + \" py: \" + 
str(obj._py)\n + \" pz: \"+ str(obj._pz) + \" mass: \" + str(obj._m))", "def __str__(self):\n return json.dumps(self.obj)", "def __str__(self):\n return self.__unicode__().encode('utf-8').decode()", "def __str__(self):\n return str(self.__dict__)", "def __str__(self):\n return str(self.__dict__)", "def to_str(self):\n return pprint.pformat(self.to_dict())", "def safe_str(obj):\n try:\n return str(obj)\n except UnicodeEncodeError:\n # obj is unicode\n return unicode(obj).encode('unicode_escape')", "def safe_str(obj):\n try:\n return str(obj)\n except UnicodeEncodeError:\n # obj is unicode\n return unicode(obj).encode('unicode_escape')", "def safe_str(obj):\n try:\n return str(obj)\n except UnicodeEncodeError:\n # obj is unicode\n return unicode(obj).encode('unicode_escape')", "def format_result(self,obj):\n return unicode(obj)", "def byte2str(self, obj):\n encoding = \"utf-8\"\n if isinstance(obj, list):\n if len(obj)>0 and not isinstance(obj[0], str):\n self.helper.log_debug(\n \"conversion from list of %s onto list of <class 'str'>\" %\n type(obj[0]))\n return [ s.decode(encoding) for s in obj ]\n elif not isinstance(obj, str):\n self.helper.log_debug(\n \"conversion from %s onto <class 'str'>\" %\n type(obj))\n return obj.decode(encoding)\n return obj", "def _object_dump_to_string(self, obj, max_recursion_level, level=0, debug_level=0):\n if level > max_recursion_level:\n return \"\".encode(\"ASCII\")\n dump_string = obj.__class__.__name__.encode(\"ASCII\")\n if debug_level == 2:\n print(\"\\t\"*level+\"level: {}, class name {}\".format(level, dump_string))\n if hasattr(obj, '__name__'): # to distinguish functions from each other\n dump_string += obj.__name__.encode(\"ASCII\")\n if debug_level == 2:\n print(\"\\t\"*level+\"level: {}, function name {}\".format(level, obj.__name__.encode(\"ASCII\")))\n\n # Get insides of the objects, based on the type\n if isinstance(obj, str):\n if debug_level == 2:\n print(\"\\t\"*level+\"level: {}, obj is str: {}\".format(level, obj))\n return dump_string + obj\n else:\n try:\n items = copy(vars(obj))\n if hasattr(obj, 'PickleCacheBlackList'):\n if debug_level == 2:\n print(\"\\t\" * level + \"obj has blacklist\", obj.PickleCacheBlackList)\n for v in obj.PickleCacheBlackList:\n del items[v]\n items = sorted(items.items())\n except:\n try:\n items = sorted(obj.items())\n except:\n # Try to sort the items.\n try:\n items = [(str(i), o) for i, o in enumerate(sorted(obj))]\n except:\n # After all fails, do not sort the insides, but this can be bad.\n # Print log that this happens.\n items = [(str(i), o) for i, o in enumerate(obj)]\n if len(items) > 0:\n log.debug(\"Can't sort insides of object type {}, first element is {}\".format(obj.__class__.__name__, items[0][1].__class__.__name__))\n\n if debug_level == 2:\n print(\"\\t\"*level+\"level: {}, items: {}\".format(level, items))\n for attribute, value in items:\n try:\n if debug_level == 2:\n print(\"\\t\" * level + \"level: {}, attribute: {}\".format(level, attribute))\n try:\n add_string = self._object_dump_to_string(attribute, max_recursion_level, level + 1, debug_level)\n except:\n add_string = self.pickle(attribute)\n dump_string += add_string\n except pickle.PicklingError: # attribute could not be dumped\n pass\n\n try:\n if debug_level == 2:\n print(\"\\t\" * level + \"level: {}, value: {}\".format(level, value))\n try:\n add_string = self._object_dump_to_string(value, max_recursion_level, level + 1, debug_level)\n except:\n add_string = self.pickle(value)\n dump_string += add_string\n except 
pickle.PicklingError: # attribute could not be dumped\n pass\n\n if debug_level > 0 and level == 0:\n print(\"dump_string is {}\\n\"\n \"Compare this with another cache hash with command\\n\"\n \" $ cmp -bl <(echo -n abcda) <(echo -n aqcde)\".format(hashlib.sha256(six.binary_type().join([dump_string])).hexdigest()))\n\n return dump_string", "def __str__(self):\n return str(self.__data)", "def _convert_to_str(self, data):\n raise NotImplementedError()", "def __str__(self):\n sio = StringIO()\n for k in self:\n sio.write(\"%s %s\\n\" % (repr(k), repr(self[k])))\n return sio.getvalue()", "def string(self):\n return str(self._dict)", "def as_string(self, value, context=None):\n return str(value)", "def toString(self):\n\n sMembers = '';\n for sAttr in self.getDataAttributes():\n oValue = getattr(self, sAttr);\n sMembers += ', %s=%s' % (sAttr, oValue);\n\n oClass = type(self);\n if sMembers == '':\n return '<%s>' % (oClass.__name__);\n return '<%s: %s>' % (oClass.__name__, sMembers[2:]);", "def transform_python(self, value):\n return str(value)", "def tostr (x):\n if isinstance (x, tuple):\n return tuple ( map (tostr, x))\n if isinstance(x, (float, numpy.float32,numpy.float64)):\n return float_to_str(x)\n return str(x)", "def lstr (obj):\n\n cmdlenc = locale.getdefaultlocale()[1]\n return repr(obj).decode(\"unicode_escape\").encode(cmdlenc)", "def convert_to_string(value: Any) -> str:\n if isinstance(value, str):\n return value\n\n if isinstance(value, bytes):\n return value.decode(\"utf-8\")\n\n return str(value)", "def __str__(self):\n return unicode(self).encode('utf-8')", "def toStr(self, protoObj):\n return text_format.MessageToString(protoObj)", "def toStr(self, protoObj):\n return text_format.MessageToString(protoObj)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", 
"def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)", "def to_str(self):\n import simplejson as json\n if six.PY2:\n import sys\n reload(sys)\n sys.setdefaultencoding(\"utf-8\")\n return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)" ]
[ "0.82419026", "0.7954741", "0.78152406", "0.773751", "0.7668481", "0.7638423", "0.7572983", "0.73172534", "0.72955215", "0.7287602", "0.72510856", "0.7232165", "0.7204237", "0.71857375", "0.7182068", "0.7144081", "0.7121091", "0.7044212", "0.69868326", "0.6939111", "0.6920688", "0.6909645", "0.6903062", "0.6887739", "0.68830866", "0.68830866", "0.68830866", "0.686469", "0.68627685", "0.68318003", "0.68318003", "0.68306065", "0.68209237", "0.68074995", "0.6779421", "0.67493683", "0.67379147", "0.67317617", "0.6685355", "0.66779184", "0.66779184", "0.66779184", "0.6662163", "0.66227996", "0.66212016", "0.66212016", "0.6618224", "0.6616809", "0.6606078", "0.6606078", "0.6599808", "0.6599572", "0.6594507", "0.6592152", "0.6589933", "0.65539944", "0.6542247", "0.6526847", "0.64832926", "0.6480904", "0.64626646", "0.6459498", "0.64437056", "0.64286923", "0.6419445", "0.6419445", "0.6404942", "0.639459", "0.639459", "0.639459", "0.63754773", "0.6375178", "0.63750553", "0.6372865", "0.63717836", "0.63639", "0.63576967", "0.63464665", "0.63427544", "0.634196", "0.63396436", "0.6335469", "0.63338244", "0.6329761", "0.63253784", "0.63253784", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156", "0.63194156" ]
0.0
-1
Method that updates the instance's attributes from positional arguments or from the keyword arguments kwargs.
def update(self, *args, **kwargs):
    if args:
        my_list = ['id', 'width', 'height', 'x', 'y']
        for i in range(len(args)):
            setattr(self, my_list[i], args[i])
    else:
        for key, value in kwargs.items():
            setattr(self, key, value)
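A minimal sketch of the update() method above in action (not from the dataset), on a hypothetical Rectangle: positional arguments map onto ['id', 'width', 'height', 'x', 'y'] in order, and the kwargs path only runs when no positional arguments are given.

class Rectangle:
    def __init__(self):
        self.id, self.width, self.height, self.x, self.y = 0, 1, 1, 0, 0

    def update(self, *args, **kwargs):
        if args:
            # positional args override attributes in fixed order
            my_list = ['id', 'width', 'height', 'x', 'y']
            for i in range(len(args)):
                setattr(self, my_list[i], args[i])
        else:
            # keyword args only apply when no positional args were passed
            for key, value in kwargs.items():
                setattr(self, key, value)

r = Rectangle()
r.update(89, 10)            # positional: id=89, width=10
r.update(height=7, y=3)     # keyword path: height=7, y=3
print(r.id, r.width, r.height, r.y)   # 89 10 7 3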
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updateAttrs(self, kwargs):\n for k, v in kwargs.iteritems():\n setattr(self, k, v)", "def update(self, *args, **kwargs):\n if args is not () and args is not None:\n attr_names = [\"id\", \"size\", \"x\", \"y\"]\n for index, attr in enumerate(args):\n setattr(self, attr_names[index], attr)\n else:\n for key, value in kwargs.items():\n if hasattr(self, key):\n setattr(self, key, value)", "def update_from_kwargs(self, **kwargs):\n for (key, value) in kwargs.items():\n setattr(self, key, value)", "def update(self, **kwargs):\n for key, val in kwargs.items():\n setattr(self, key, val)", "def update(self, *args, **kwargs):\n if kwargs is not None:\n for key, value in kwargs.items():\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n arg_name = ['id', 'width', 'height', 'x', 'y']\n \"\"\" If args only, sets attribute to correct arg_name \"\"\"\n if len(args) > 0:\n numArgs = 0\n for attr in range(len(args)):\n setattr(self, arg_name[numArgs], args[numArgs])\n numArgs += 1\n \"\"\" Put kwargs into dict - if key matches arg_name, set to value \"\"\"\n kwargs_dict = kwargs\n for key, value in kwargs_dict.items():\n for attr in range(len(arg_name)):\n if key == arg_name[attr]:\n setattr(self, arg_name[attr], value)", "def update(self, *args, **kwargs):\n new = [\"id\", \"size\", \"x\", \"y\"]\n for pos, val in enumerate(args):\n setattr(self, new[pos], val)\n for key in kwargs:\n setattr(self, key, kwargs[key])", "def update(self, *args, **kwargs):\n if args:\n li = [\"id\", \"size\", \"x\", \"y\"]\n for i in range(len(args)):\n setattr(self, li[i], args[i])\n else:\n for i, j in kwargs.items():\n setattr(self, i, j)", "def update(self, *args, **kwargs):\n if args:\n arg_order = [\"id\", \"size\", \"x\", \"y\"]\n for index, arg in enumerate(args):\n setattr(self, arg_order[index], arg)\n elif kwargs:\n for key, value in kwargs.items():\n if hasattr(self, key):\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n if len(args) != 0:\n i = 0\n attr = ['id', 'width', 'height', 'x', 'y']\n for arg in args:\n setattr(self, attr[i], args[i])\n i += 1\n else:\n for key, val in kwargs.items():\n setattr(self, key, val)", "def update(self, *args, **kwargs):\n if args:\n if args is not None:\n lista = [\"id\", \"size\", \"x\", \"y\"]\n for i, j in zip(args, lista):\n setattr(self, j, i)\n else:\n for key, value in kwargs.items():\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n selves = ['id', 'size', 'x', 'y']\n if args is not None and len(args) is not 0:\n for a in range(len(args)):\n setattr(self, selves[a], args[a])\n else:\n for key, value in kwargs.items():\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n attrs = [\"id\", \"width\", \"height\", \"x\", \"y\"]\n\n if args:\n for i in range(len(args)):\n setattr(self, attrs[i], args[i])\n elif kwargs is not None:\n for key, value in kwargs.items():\n if key in attrs:\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n sqrlist = [\"id\", \"size\", \"x\", \"y\"]\n if args and len(args) != 0:\n for i in range(len(sqrlist)):\n if i < len(args):\n # call to setter method\n setattr(self, sqrlist[i], args[i])\n else:\n if kwargs and len(kwargs) != 0:\n for k in sqrlist:\n for key, value in kwargs.items():\n if k == key:\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n if args is None or len(args) == 0:\n for i in kwargs:\n if hasattr(self, i):\n setattr(self, i, kwargs[i])\n largs = list(args)\n latts = [\"id\", \"width\", \"height\", \"x\", \"y\"]\n for 
i in range(len(largs)):\n setattr(self, latts[i], largs[i])", "def update(self, *args, **kwargs):\n list_args = [\"id\", \"width\", \"height\", \"x\", \"y\"]\n count = 0\n a_dict = {}\n\n if args:\n if len(args) > 0 and len(args) < 6:\n for arg in args:\n a_dict.update({list_args[count]: arg})\n setattr(self, list_args[count], arg)\n count += 1\n elif kwargs:\n for key, value in kwargs.items():\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n assign = ('id', 'width', 'height', 'x', 'y')\n if args:\n for key, idx in zip(assign, range(len(args))):\n exec('self.{} = {}'.format(key, args[idx]))\n else:\n for key, val in kwargs.items():\n if key in ('id', 'width', 'height', 'x', 'y'):\n exec('self.{} = {}'.format(key, val))", "def set_params(self,**kwargs):\n for key in kwargs:\n setattr(self, key, kwargs[key])", "def update(self, *args, **kwargs):\n attributes = [\"id\", \"size\", \"x\", \"y\"]\n if len(args) > 0:\n for i in range(len(args)):\n setattr(self, attributes[i], args[i])\n else:\n self.id = kwargs.get(\"id\", self.id)\n self.size = kwargs.get(\"size\", self.size)\n self.x = kwargs.get(\"x\", self.x)\n self.y = kwargs.get(\"y\", self.y)", "def update(self, **kwargs):\n for k, v in kwargs.iteritems():\n if hasattr(self, k):\n setattr(self, k, v)", "def update(self, **kwargs):\n print(\"Updating model\")\n print(kwargs)\n for key in kwargs:\n setattr(self, key, kwargs[key])", "def update(self, *args, **kwargs):\n if args and len(args) > 0:\n keys = [\"id\", \"width\", \"height\", \"x\", \"y\"]\n for i, v in enumerate(args):\n setattr(self, keys[i], v)\n else:\n for k, v in kwargs.items():\n setattr(self, k, v)", "def updateParameters(self,*args,**kwargs):\n for key in kwargs.keys():\n self._params[key] = kwargs[key]", "def update(self, **kwargs):\n for key, value in kwargs.items():\n if key not in self.VALID_NAMES:\n continue\n\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n flist = ['id', 'width', 'height', 'x', 'y']\n fieldlist = [0, 0, 0, 0, 0]\n idx = 0\n for arg in args:\n fieldlist[idx] = arg\n idx += 1\n if fieldlist[0] > 0:\n self.id = fieldlist[0]\n if fieldlist[1] > 0:\n self.width = fieldlist[1]\n if fieldlist[2] > 0:\n self.height = fieldlist[2]\n if fieldlist[3] > 0:\n self.x = fieldlist[3]\n if fieldlist[4] > 0:\n self.y = fieldlist[4]\n flist = ['id', 'width', 'height', 'x', 'y']\n if len(args) == 0:\n for key, value in kwargs.items():\n if key in flist:\n setattr(self, key, value)", "def add_attributes(data, **kwargs):\n for key in kwargs:\n data[key] = kwargs[key]", "def update(self, arg=None, **kwargs):\n if arg:\n if hasattr(arg, 'keys'):\n for k in arg: self[k] = arg[k]\n else:\n for k, v in arg: self[k] = v\n\n if kwargs:\n for k in kwargs: self[k] = kwargs[k]", "def setAttributes(self, args):\n for atr in self.defaultAttributes:\n if args.has_key(atr):\n # convert atr to proper type\n objAttr = getattr(self, atr)\n myType = type(args[atr])\n if type(objAttr) == types.IntType and myType <> types.IntType:\n args[atr] = int(args[atr])\n elif type(objAttr) == types.StringType and myType <> types.StringType:\n args[atr] = str(args[atr])\n elif type(objAttr) == types.ListType and myType <> types.ListType:\n args[atr] = eval(args[atr])\n elif type(objAttr) == types.DictType and myType <> types.DictType:\n args[atr] = eval(args[atr])\n elif type(objAttr) == types.FloatType and myType <> types.FloatType:\n args[atr] = float(args[atr])\n setattr(self, atr, args[atr])", "def update(instance, args):\n for key in args.keys():\n 
setattr(instance, key, args[key])\n return instance", "def updateArgs(self, namespace, updates):\n namespace = self._fixNS(namespace)\n for k, v in updates.items():\n self.setArg(namespace, k, v)", "def update(self, *args, **kwargs):\n if len(args) != 0:\n try:\n self.id = args[0]\n self.width = args[1]\n self.height = args[2]\n self.x = args[3]\n self.y = args[4]\n except IndexError:\n pass\n else:\n for i in kwargs.keys():\n try:\n getattr(self, i)\n except Exception as er:\n raise er\n setattr(self, i, kwargs[i])", "def update(self, *args, **kwargs):\n if args:\n if len(args) != 1:\n raise TypeError(f\"update expected at most 1 argument, got {len(args)}\")\n arg = args[0]\n if hasattr(arg, \"keys\"):\n super().update(normalize(arg, cls=self.__class__))\n else:\n try:\n for k, v in arg:\n super().update(normalize({k: v}, cls=self.__class__))\n except Exception:\n raise ValueError(\n \"dictionary update sequence element #0 has length \"\n f\"{ len(arg[0]) }; 2 is required\"\n )\n for k in kwargs:\n super().update(normalize({k: kwargs[k]}, cls=self.__class__))", "def update(self, *args, **kwargs):\n if len(args):\n for i, val in enumerate(args):\n if i == 0:\n self.id = val\n elif i == 1:\n self.size = val\n elif i == 2:\n self.x = val\n elif i == 3:\n self.y = val\n else:\n for key, val in kwargs.items():\n if key == \"id\":\n self.id = val\n if key == \"size\":\n self.size = val\n if key == \"x\":\n self.x = val\n if key == \"y\":\n self.y = val", "def update(self, **kwargs):\n for key, value in sorted(kwargs.items()):\n if value:\n if hasattr(self, key):\n setattr(self, key, value)", "def update(self, *args, **kwargs):\n list = [\"id\", \"width\", \"height\", \"x\", \"y\"]\n count = 0\n if args:\n for arg in args:\n count += 1\n if count == 1:\n self.id = arg\n elif count == 2:\n self.width = arg\n elif count == 3:\n self.height = arg\n elif count == 4:\n self.x = arg\n elif count == 5:\n self.y = arg\n elif kwargs:\n for key, value in kwargs.items():\n if key in list:\n setattr(self, key, value)\n \"\"\"\n if key == \"id\":\n self.id = value\n elif key == \"width\":\n self.__width = value\n elif key == \"height\":\n self.__height = value\n elif key == \"x\":\n self.__x = value\n elif key == \"y\":\n self.__y = value\n \"\"\"", "def auto_update_attrs_from_kwargs(method):\n\n def wrapped(self, **kwargs):\n # method signature introspection\n argspec = inspect.getargspec(method)\n defaults = argspec.defaults or ()\n nb_args, nb_defaults = len(argspec.args), len(defaults)\n # construct a dict of method's keyword arguments\n options = dict(zip(argspec.args[nb_args - nb_defaults:], defaults))\n # update it with actual keyword arguments\n options.update(kwargs)\n # update attributes of instance\n self.__dict__.update(options)\n method(self, **kwargs)\n\n return wrapped", "def update_state(self, **kwargs):\n\n for name in self.metrics:\n\n metric = self.metrics[name]\n\n argspec = inspect.getfullargspec(metric.update_state)\n\n kwargs_to_pass = {k: kwargs[k] for k in kwargs if k in argspec.args}\n\n metric.update_state(**kwargs_to_pass)", "def update(self, *args, **kwargs):\n if len(args) != 0:\n i = 1\n for arg in args:\n if i == 1:\n self.id = arg\n elif i == 2:\n self.size = arg\n elif i == 3:\n self.x = arg\n elif i == 4:\n self.y = arg\n i += 1\n elif len(kwargs) != 0:\n for arg in kwargs.keys():\n if arg == \"id\":\n self.id = kwargs.get(arg)\n if arg == \"size\":\n self.size = kwargs.get(arg)\n if arg == \"x\":\n self.x = kwargs.get(arg)\n if arg == \"y\":\n self.y = kwargs.get(arg)", 
"def _set_attributes(self):", "def update_args(self, a_dict):\n for k, v in a_dict.items():\n if k is \"id\":\n self.id = v\n elif k is \"width\":\n self.__width = v\n elif k is \"height\":\n self.__height = v\n elif k is \"x\":\n self.__x = v\n elif k is \"y\":\n self.__y = v", "def f_set(self, *args, **kwargs):\n if args and self.v_name is None:\n raise AttributeError(\n \"Cannot set positional value because I do not have a name!\"\n )\n for idx, arg in enumerate(args):\n valstr = self.f_translate_key(idx)\n self.f_set_single(valstr, arg)\n\n for key, arg in kwargs.items():\n self.f_set_single(key, arg)", "def update(self, *argv, **kwargs):\n if argv and len(argv) > 0:\n for counter, arg in enumerate(argv, 0):\n if counter == len(argv):\n break\n if counter == 0 and arg is not None:\n self.id = arg\n elif counter == 1:\n self.size = arg\n elif counter == 2:\n self.x = arg\n elif counter == 3:\n self.y = arg\n elif kwargs and len(kwargs) > 0:\n for k, v in kwargs.items():\n if k == \"id\" and v is not None:\n self.id = v\n elif k == \"size\":\n self.size = v\n elif k == \"x\":\n self.x = v\n elif k == \"y\":\n self.y = v", "def _update_args_and_kargs(self):\n if self.kwargs:\n self.value.append(self.args)\n self.value.append(self.kwargs)\n else:\n if self.args:\n self.value.append(self.args)", "def update_attributes_by_values(etl, update_kwargs):\r\n import arcetl\r\n func = functools.partial(etl.transform,\r\n transformation=arcetl.attributes.update_by_value)\r\n tuple(func(**kwargs) for kwargs in update_kwargs)", "def __init__(self, **initial_attributes):\n\n for attribute_name, attribute_value in initial_attributes.items():\n setattr(self, attribute_name, attribute_value)", "def update(self, *args, **kwargs):\n\n if bool(args) is True and args is not None:\n try:\n self.id = args[0]\n self.width = args[1]\n self.height = args[2]\n self.x = args[3]\n self.y = args[4]\n except IndexError:\n pass\n else:\n for i in kwargs.keys():\n if i in dir(self):\n setattr(self, i, kwargs[i])", "def __update_params(self,**kwargs):\n updatedArgSet = set(self._updateParamsArgs) & kwargs.viewkeys()\n if len(updatedArgSet) > 0:\n args = self._subDictionary(self._updateParamsArgs)\n newArgs = self._onParamsUpdate(**args)\n updatedArgs =dict()\n for k in updatedArgSet:\n try:\n updatedArgs[k] = newArgs[k]\n except:\n pass\n\n self.__dictionary.update(newArgs)\n else:\n pass", "def __init__(self, *args, **kwargs):\n self.update(*args, **kwargs)", "def update_values(self, to_update):\n for key, value in kwargs.iteritems():\n self.params[key] = value\n # update the possibly dependent parameters\n self.set_filenames()", "def __init__(self, **attributes):\n for key, value in attributes.items():\n setattr(self, key, value)", "def set_attrs(self, **kwargs) -> None:\n self._obj.coords[GEO_MAP_COORD].attrs.update(**kwargs)", "def setup(self, **kwargs):\n\n for k, v in kwargs.items():\n setattr(self, k, v)", "def part(self, **kwargs):\n for key, value in kwargs.items():\n setattr(self, key, value)", "def update(self, **kwargs):\n for key, value in kwargs.items():\n key = key.upper()\n if not hasattr(self, key):\n self.logger.info(f'[✗] Ignore unknown attribute \"{key}\"')\n else:\n setattr(self, key, value)\n self.logger.info(f'[✓] Attribute \"{key}\" has been updated to \"{value}\"')\n\n assert self.UI in self._SUPPORT_UI, 'unsupported UI'\n assert self.MODE in self._SUPPORT_MODE, 'unsupported MODE'", "def update_with_fit_args(self, **kwargs):\n pass", "def monkey_set_params(self, **args):\n 
self._monkey_set_params_counter += 1\n assert self._args == (args,), 'unexpected additional arguments. Keep the type in mind'", "def set(self, **inputs):\r\n for property, value in inputs.items():\r\n try:\r\n setattr(self,property,value)\r\n except:\r\n raise Exception(property + \" keyword argument not recognized\")\r\n\r\n # update values\r\n self._check_attributes()\r\n self._set_functions()", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update_config(cls, **kwargs):\n for key, val in kwargs.items():\n setattr(cls, key, val)", "def set_func_args(self, *args, **kwargs):\n self._func_args = args \n self._func_kw_args = kwargs", "def __init__(self, *args, **kwargs):\n for dictionary in [_ for _ in args if isinstance(_, dict)]:\n for key in dictionary:\n setattr(self, key, dictionary[key])\n for key in kwargs:\n setattr(self, key, kwargs[key])", "def update_attributes_by_mappings(etl, update_kwargs):\r\n import arcetl\r\n func = functools.partial(\r\n etl.transform,\r\n transformation=arcetl.attributes.update_by_mapping,\r\n )\r\n tuple(func(**kwargs) for kwargs in update_kwargs)", "def set_parameters(self, **kwargs):\n\n invalid_params = set(self.parameter_names).difference(kwargs.keys())\n if invalid_params:\n raise ValueError(\n \"unknown parameters: {}\".format(\", \".join(invalid_params))) \n \n for parameter_name, value in kwargs.items():\n setattr(self, \"_{}\".format(parameter_name), value)\n\n return kwargs", "def __init__(self, **kwargs):\n for key, val in kwargs.items():\n setattr(self, key, val)", "def __init__(self, **kwargs):\n for key, val in kwargs.items():\n setattr(self, key, val)", "def __init__(self, **kwargs):\n for key, val in kwargs.items():\n setattr(self, key, val)", "def update_attributes(self, attrs):\n try:\n _dict = self.extract_request(attrs)\n for key in _dict:\n setattr(self, key, _dict[key])\n db.session.add(self)\n db.session.commit()\n except Exception as e:\n print(e)", "def set_attributes(self, attributes):\n self.attributes = dict(attributes) # overwrite the existing registry of attributes with the input attributes", "def set_attributes(self, attributes):\n self.attributes = dict(attributes) # overwrite the existing registry of attributes with the input attributes", "def set_attributes(self, attributes):\n self.attributes = dict(attributes) # overwrite the existing registry of attributes with the input attributes", "def update_args(self, args):\n for cfg in args:\n keys, v = cfg.split('=', maxsplit=1)\n keylist = keys.split('.')\n dic = self\n for i, k in enumerate(keylist[:-1]):\n assert k in dir(dic), \"Unknown config key: {}\".format(keys)\n dic = getattr(dic, k)\n key = keylist[-1]\n oldv = getattr(dic, key)\n if not isinstance(oldv, str):\n v = eval(v)\n setattr(dic, key, v)", "def test_22_update_kwargs(self):\n r = Rectangle(10, 10, 10, 10, 1)\n r.update(height=1)\n self.assertEqual(r.__str__(), \"[Rectangle] (1) 10/10 - 10/1\")\n r.update(width=1, x=2)\n self.assertEqual(r.__str__(), \"[Rectangle] (1) 2/10 - 1/1\")\n r.update(y=1, width=2, x=3, id=89)\n self.assertEqual(r.__str__(), \"[Rectangle] (89) 3/1 - 2/1\")", "def fill(self, **kwargs):\r\n for name in 
kwargs.keys():\r\n setattr(self, name, kwargs[name])\r\n return self", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def __setattr__(*args):", "def update(self, *args, **kwargs):\n if args:\n self.__update(*args)\n elif kwargs:\n self.__update(**kwargs)", "def __init__(self, **kwargs):\n for key, value in kwargs.items():\n setattr(self, key, value)", "def updatekwargs(self,request):\n updated_dict={}\n if isinstance(request.query_params,QueryDict):\n updated_dict = {k:','.join(v) for k,v in request.query_params.iterlists()}\n else:\n updated_dict = request.query_params\n updated_dict.update(self.kwargs)\n\n self.kwargs = updated_dict" ]
[ "0.8080206", "0.76989526", "0.76720726", "0.763494", "0.7632453", "0.7591798", "0.7569203", "0.751074", "0.7499558", "0.7487695", "0.7420642", "0.7385164", "0.7351219", "0.728038", "0.72645634", "0.71921843", "0.71651256", "0.7086656", "0.70693654", "0.7065505", "0.70310396", "0.7028163", "0.70064986", "0.6961405", "0.685917", "0.68516433", "0.68385303", "0.6797823", "0.6775409", "0.67670935", "0.6751023", "0.67485964", "0.674102", "0.66856366", "0.66654396", "0.65819174", "0.6561252", "0.6555045", "0.65466726", "0.6541919", "0.6521925", "0.65096486", "0.65044075", "0.6487296", "0.6486764", "0.6478821", "0.6466051", "0.64605707", "0.6450012", "0.64478165", "0.6439586", "0.64087266", "0.6394038", "0.6389654", "0.63887584", "0.63798654", "0.63718003", "0.6357438", "0.6357438", "0.6357438", "0.6357438", "0.6357438", "0.6357438", "0.63401186", "0.6320991", "0.63197285", "0.62670594", "0.62619025", "0.62544626", "0.62544626", "0.62544626", "0.62523323", "0.6252298", "0.6252298", "0.6252298", "0.62518305", "0.62478834", "0.6245338", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.6245256", "0.624037", "0.6238623", "0.623568" ]
0.74814165
10
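As an aside for readers skimming this record: the negatives above are dozens of variations on the same positional-then-keyword `update(*args, **kwargs)` idiom. A minimal, self-contained distillation of that idiom is sketched below; the class name, attribute order, and defaults are illustrative assumptions, not a quote of any single snippet.

```python
class Rectangle:
    """Toy shape used to demonstrate the update(*args, **kwargs) pattern."""

    # Positional-argument order shared by most of the snippets above (assumed).
    _ATTRS = ("id", "width", "height", "x", "y")

    def __init__(self, id=0, width=1, height=1, x=0, y=0):
        self.id, self.width, self.height, self.x, self.y = id, width, height, x, y

    def update(self, *args, **kwargs):
        if args:
            # Positional args win and map onto _ATTRS in order.
            for name, value in zip(self._ATTRS, args):
                setattr(self, name, value)
        else:
            # Otherwise only known keyword arguments are applied.
            for key, value in kwargs.items():
                if key in self._ATTRS:
                    setattr(self, key, value)


r = Rectangle()
r.update(7, 10, 5)            # id=7, width=10, height=5
r.update(x=2, y=3, bogus=99)  # unknown keys are silently ignored
print(r.id, r.width, r.height, r.x, r.y)  # 7 10 5 2 3
```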
Method that adds the attributes of the Rectangle class to a dictionary. Returns the dictionary with the attributes.
def to_dictionary(self): my_dic = { 'id': self.id, 'width': self.width, 'height': self.height, 'x': self.x, 'y': self.y } return my_dic
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rectangledict(self):\n return rectangledict(self.rectangles)", "def to_dictionary(self):\n dictionary = dict(self.__dict__)\n for key in dictionary:\n new_key = key.replace(\"_Rectangle__\", \"\")\n dictionary[new_key] = dictionary.pop(key)\n return dictionary\n \"\"\"\n dictionary = {'id': self.id, 'x': self.x, 'y': self.y,\n 'width': self.width, 'height': self.height}\n return dictionary\n \"\"\"", "def to_dictionary(self):\n _dict = dict(self.__dict__)\n dict1 = {}\n for key, value in _dict.items():\n dict1[key.replace(\"_Rectangle__\", \"\")] = value\n return dict1", "def to_dictionary(self):\n dict_rect = {}\n dict_rect[\"id\"] = self.id\n dict_rect[\"width\"] = self.width\n dict_rect[\"height\"] = self.height\n dict_rect[\"x\"] = self.x\n dict_rect[\"y\"] = self.y\n return dict_rect", "def fielddict(self):\n return self.fields.rectangledict", "def to_dictionary(self):\n rect = {\n \"x\": self.x,\n \"y\": self.y,\n \"id\": self.id,\n \"height\": self.height,\n \"width\": self.width\n }\n return(rect)", "def to_dictionary(self):\n d = {}\n for i, j in self.__dict__.items():\n if i == \"id\":\n d[\"id\"] = j\n elif i == \"_Rectangle__height\":\n d[\"size\"] = j\n elif i == \"_Rectangle__x\":\n d[\"x\"] = j\n elif i == \"_Rectangle__y\":\n d[\"y\"] = j\n return d", "def to_dictionary(self):\n dict_rect = {\n 'x': self.x, 'y': self.y, 'id': self.id,\n 'height': self.height, 'width': self.width}\n return dict_rect", "def test_to_dictionary(self):\n r = Rectangle(1, 1, 1, 1, 1)\n d = {'id': 1, 'width': 1, 'height': 1, 'x': 1, 'y': 1}\n self.assertEqual(r.to_dictionary(), d)\n r.my_fun_new_attr = 42\n self.assertEqual(r.to_dictionary(), d)", "def to_dictionary(self):\n attributes = [\"id\", \"size\", \"x\", \"y\"]\n return {key: getattr(self, key) for key in attributes}", "def testdicttorect(self):\n dicty = {\"id\": 5, \"width\": 3, \"height\": 4, \"x\": 2, \"y\": 1}\n a = Rectangle(3, 4, 2, 1, 5)\n b = Rectangle.create(**dicty)\n self.assertEqual(a.id, b.id)\n self.assertEqual(a.width, b.width)\n self.assertEqual(a.height, b.height)\n self.assertEqual(a.x, b.x)\n self.assertEqual(a.y, b.y)", "def _getAttributes(self):\n self._params = {}\n if self.interp is not None:\n # Initialize interpolation function :\n self['x'] = np.arange(0, self.pixels, 1)\n self['y'] = np.arange(0, self.pixels, 1)\n # Define newaxis :\n self['xnew'] = np.arange(0, self.pixels, self.interp)\n self['ynew'] = np.arange(0, self.pixels, self.interp)\n self['csize'] = len(self['xnew'])\n else:\n self['csize'] = self.pixels\n # Variables :\n l = int(self['csize'] / 2)\n self['l'] = l\n y, x = np.ogrid[-l:l, -l:l]\n disc = x**2 + y**2\n self['mask'] = disc < l**2\n self['nmask'] = np.invert(self['mask'])\n # self['image'] = np.tile(self.bgcolor[np.newaxis, ...], (2*l, 2*l, 1))", "def test_10_private_attr(self):\n r = Rectangle(5, 2, 10, 20, 7)\n dt = {\"_Rectangle__width\": 5, \"_Rectangle__height\": 2,\n \"_Rectangle__x\": 10, \"_Rectangle__y\": 20, \"id\": 7}\n self.assertEqual(r.__dict__, dt)", "def get_attributes(self):\n \n retdict = {}\n retdict['s'] = str(self.s)\n if self.t != None:\n retdict['t'] = str(self.t)\n retdict['a'] = str(self.a)\n retdict['b'] = str(self.b)\n retdict['c'] = str(self.c)\n retdict['d'] = str(self.d)\n return retdict", "def test_create_rectangle(self):\n d = self.r1.to_dictionary()\n r3 = Rectangle.create(**d)\n self.assertEqual(str(r3), str(self.r1))", "def to_dict(\n self,\n attributes: Iterable[str] = (\"xyz\", \"viewdir\", \"imgsz\", \"f\", \"c\", \"k\", \"p\"),\n ) -> 
Dict[str, tuple]:\n return {key: helpers.numpy_to_native(getattr(self, key)) for key in attributes}", "def as_bounds(self) -> Dict[str, float]:\n return {\n \"left\": self.x,\n \"top\": self.y,\n \"right\": self.x + self.width,\n \"bottom\": self.y + self.height,\n }", "def to_dictionary(self):\n dic = {}\n ls = ['id', 'size', 'x', 'y']\n for i in ls:\n dic[i] = getattr(self, i)\n return dic", "def dict_with_attrs2(*args):\n class CustomDict(object):\n __slots__ = args\n __dict__ = {}\n\n def __init__(self, *args, **kwargs):\n super(CustomDict, self).__init__()\n if args:\n self.__dict__.update(*args)\n\n for k, v in kwargs.iteritems():\n setattr(self, k, v)\n\n def __getitem__(self, key):\n return self.__dict__[key]\n\n def __setitem__(self, key, val):\n self.__dict__[key] = val\n\n def __delitem__(self, key):\n del self.__dict__[key]\n\n def __getattr__(self, name):\n return self.__dict__[name]\n\n return CustomDict", "def __init__(self, *args, **kwargs):\n super(AttrDict, self).__init__(*args, **kwargs)\n self.__dict__ = self", "def attributes(self):\n return dict(self.__attributes)", "def to_dictionary(self):\n dict_contents = [\"id\", \"size\", \"x\", \"y\"]\n new_dict = {}\n for key in dict_contents:\n new_dict[key] = getattr(self, key)\n return new_dict", "def to_dict(self):\n\n shape = super().to_dict()\n shape[\"radius\"] = self.radius\n shape[\"length\"] = self.length\n return shape", "def dict_with_attrs(*args):\n class CustomDict(dict):\n __slots__ = args\n\n def __init__(self, *args, **kwargs):\n super(CustomDict, self).__init__(*args)\n\n for k, v in kwargs.iteritems():\n setattr(self, k, v)\n\n return CustomDict", "def get_attributes(self) -> Dict[str, str]:\n pass", "def create(cls, **dictionary):\n\n if cls.__name__ == 'Rectangle':\n rec = cls(2, 3)\n\n else:\n rec = cls(2)\n\n rec.update(**dictionary)\n return rec", "def return_boxes_class_as_dict(self) -> Dict[int, Dict]:\n\n boxes_dict = {}\n for index, sg_box in enumerate(self.root.iter('object')):\n boxes_dict[index] = {\"name\": sg_box.find(\"name\").text,\n \"xmin\": int(sg_box.find(\"bndbox\").find(\"xmin\").text),\n \"ymin\": int(sg_box.find(\"bndbox\").find(\"ymin\").text),\n \"xmax\": int(sg_box.find(\"bndbox\").find(\"xmax\").text),\n \"ymax\": int(sg_box.find(\"bndbox\").find(\"ymax\").text)}\n\n return boxes_dict", "def _get_attribute_dict(self, attributes, classname=None):\n if attributes and isinstance(attributes, six.string_types):\n return {\n 'class': attributes\n }\n if not attributes:\n attributes = {}\n if not classname:\n classname = self.DEFAULT_CLASS_NAME\n attributes.setdefault('class', classname)\n return attributes", "def test_create_rect(self):\n rect1 = Rectangle(1, 2, 3)\n rect1_dict = rect1.to_dictionary()\n rect2 = Rectangle.create(**rect1_dict)\n self.assertNotEqual(rect1, rect2)", "def attributes(self):", "def dictOfDraws(self):\n return dict()", "def to_dict(self):\n\n shape = super().to_dict()\n shape[\"radius\"] = self.radius\n return shape", "def get_attributes(self):\n\t\treturn dict(list(self.__element.items()))", "def __init__(self, height, width):\n\n\t\t# _width and _height are internal (private) Rectangle Instance's attributes. 
This is something\n\t\t# We keep to ourselves to make sure the User can't just update these attrs randomly and also\n\t\t# so that the code has backward compatibility.\n\t\tself._width = None\n\t\tself._height = None\n\n\t\t# Lets now use the SETTER Method the width and height of the newly initialised Rectangle Class\n\t\tself.width = width\n\t\tself.height = height", "def add_attributes(self, attributes):\n self.attributes = dict(self.attributes, **attributes)", "def attributes(self):\n ...", "def create(cls, **dictionary):\n if cls.__name__ == \"Rectangle\":\n new_class = cls(1, 2)\n else:\n new_class = cls(1)\n new_class.update(**dictionary)\n return new_class", "def __init__(self, attributes: List[AttributeName], g1: G1Element, Y1: Dict[str, G1Element], g2: G2Element, X2: G2Element, Y2: Dict[AttributeName, G2Element]):\n self.attributes = attributes\n self.g1 = g1\n self.Y1 = Y1\n self.g2 = g2\n self.X2 = X2\n self.Y2 = Y2", "def _attribs(self, name=None, description=None):\n a = {}\n if name:\n a['name'] = name\n if description:\n a['description'] = description\n return a", "def rectangledict(rectangles):\n return {rectangle.n: i for i, rectangle in enumerate(rectangles)}", "def _init_attributes(self):\n self.attr = {\n 'name': None,\n 'tags': [],\n 'openHours': None,\n 'type': None,\n 'parent': None,\n 'locationId': None,\n 'bannerAbbreviation': None,\n 'arcGisAbbreviation': None,\n 'geoLocation': None,\n 'geometry': None,\n 'summary': None,\n 'description': None,\n 'descriptionHtml': None,\n 'address': None,\n 'city': None,\n 'state': None,\n 'zip': None,\n 'county': None,\n 'telephone': None,\n 'fax': None,\n 'thumbnails': [],\n 'images': [],\n 'departments': [],\n 'website': None,\n 'sqft': None,\n 'calendar': None,\n 'campus': None,\n 'girCount': None,\n 'girLimit': False,\n 'girLocations': None,\n 'synonyms': [],\n 'bldgId': None,\n 'parkingZoneGroup': None,\n 'propId': None,\n 'adaParkingSpaceCount': None,\n 'motorcycleParkingSpaceCount': None,\n 'evParkingSpaceCount': None,\n 'weeklyMenu': None,\n 'notes': None,\n 'labels': {},\n 'steward': None,\n 'shape': {}\n }", "def to_dictionary(self):\n new_dict = {'id': self.id, 'width': self.width, 'height':\n self.height, 'x': self.x, 'y': self.y}\n return new_dict", "def ToDict(self):\n atributes_dictionary = {}\n for key, value in self.__dict__.iteritems():\n atributes_dictionary[key] = value\n return atributes_dictionary", "def to_dictionary(self):\n new_dictionary = {}\n for key, value in self.__dict__.items():\n new_dictionary[key.split(\"__\")[-1]] = value\n new_dictionary['size'] = new_dictionary['width']\n del new_dictionary['width']\n del new_dictionary['height']\n return new_dictionary", "def _getAttrMap(self):\r\n if not getattr(self, 'attrMap'):\r\n self.attrMap = {}\r\n for (key, value) in self.attrs:\r\n self.attrMap[key] = value\r\n return self.attrMap", "def create(cls, **dictionary):\n if cls.__name__ == 'Rectangle':\n n = cls(1, 1)\n else:\n n = cls(1)\n n.update(**dictionary)\n return n", "def create(cls, **dictionary):\n dummy = cls(1, 1) if cls.__name__ == \"Rectangle\" else cls(1)\n dummy.update(**dictionary)\n return dummy", "def addSVGAttributes():\n\n # Can pass attributes during initialisation\n my_svg = drawSVG.SVG({'width': 80})\n\n # Or can (re)define later\n my_svg.attributes['height'] = 50\n\n my_svg.addChildElement('rect', {'width': 200, 'height': 200})\n\n return my_svg", "def _get_all_attributes(self) -> Dict[str, Any]:\n all_attributes = self.__dict__.copy()\n 
all_attributes.update(self.class_attributes)\n return all_attributes", "def get_attributes(self):\n _attributes = {\n 'function_id': self.function_id,\n 'hardware_id': self.hardware_id,\n 'mode_id': self.mode_id,\n 'critical_item': self.critical_item,\n 'description': self.description,\n 'design_provisions': self.design_provisions,\n 'detection_method': self.detection_method,\n 'effect_end': self.effect_end,\n 'effect_local': self.effect_local,\n 'effect_next': self.effect_next,\n 'effect_probability': self.effect_probability,\n 'hazard_rate_source': self.hazard_rate_source,\n 'isolation_method': self.isolation_method,\n 'mission': self.mission,\n 'mission_phase': self.mission_phase,\n 'mode_criticality': self.mode_criticality,\n 'mode_hazard_rate': self.mode_hazard_rate,\n 'mode_op_time': self.mode_op_time,\n 'mode_probability': self.mode_probability,\n 'mode_ratio': self.mode_ratio,\n 'operator_actions': self.operator_actions,\n 'other_indications': self.other_indications,\n 'remarks': self.remarks,\n 'rpn_severity': self.rpn_severity,\n 'rpn_severity_new': self.rpn_severity_new,\n 'severity_class': self.severity_class,\n 'single_point': self.single_point,\n 'type_id': self.type_id\n }\n\n return _attributes", "def as_dict(self):\n result = {}\n for attr in self.__attr:\n result[attr] = getattr(self, attr)\n return result", "def _set_attributes(self):", "def to_dictionary(self):\n ret_dict = OrderedDict()\n ret_dict[\"id\"] = self.id\n ret_dict[\"width\"] = self.width\n ret_dict[\"height\"] = self.height\n ret_dict[\"x\"] = self.x\n ret_dict[\"y\"] = self.y\n return dict(ret_dict)", "def update(self, *args, **kwargs):\n if args and len(args) > 0:\n if len(args) == 1:\n Base.__init__(self, args[0])\n elif len(args) == 2:\n Base.__init__(self, args[0])\n self.__width = args[1]\n elif len(args) == 3:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n elif len(args) == 4:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n elif len(args) == 5:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n self.__y = args[4]\n else:\n for key, value in kwargs.items():\n if key == 'width':\n Rectangle.width.__set__(self, value)\n elif key == 'height':\n Rectangle.height.__set__(self, value)\n elif key == 'x':\n Rectangle.x.__set__(self, value)\n elif key == 'y':\n Rectangle.y.__set__(self, value)\n elif key == 'id':\n Base.__init__(self, value)", "def to_dictionary(self):\n dictionary = {\n \"id\": self.id,\n \"size\": self.width,\n \"x\": self.x,\n \"y\": self.y\n }\n return dictionary", "def to_dictionary(self):\n dictionary = {\n \"id\": self.id,\n \"width\": self.width,\n \"height\": self.height,\n \"x\": self.x,\n \"y\": self.y\n }\n return dictionary", "def to_dictionary(self):\n return dict(id=self.id, width=self.width,\n height=self.height, x=self.x, y=self.y)", "def to_dictionary(self):\n x = super().to_dictionary()\n y = x.copy()\n y[\"size\"] = self.width\n if y[\"height\"] and y[\"width\"]:\n del y[\"height\"]\n del y[\"width\"]\n return(y)", "def to_dict(self):\n if self._dict is not None:\n return self._dict\n\n result = {}\n for key in self.ATTRIBUTES:\n value = getattr(self, key)\n if value:\n result[key] = value\n self._dict = result\n return result", "def to_dictionary(self):\n return {\n \"id\": self.id,\n \"width\": self.width,\n \"height\": self.height,\n \"x\": self.x,\n \"y\": self.y\n }", "def to_dictionary(self):\n return {\n \"id\": 
self.id,\n \"width\": self.width,\n \"height\": self.height,\n \"x\": self.x,\n \"y\": self.y\n }", "def get_attributes(self):\n return dict(self.attributes) # return the attributes", "def get_attributes(self):\n return dict(self.attributes) # return the attributes", "def get_attributes(self):\n return dict(self.attributes) # return the attributes", "def to_dictionary(self):\n return {\"id\": self.id, \"width\": self.__width, \"height\": self.__height,\n \"x\": self.__x, \"y\": self.__y}", "def add_attributes(self, pore_dict, throat_dict):\n\n self.add_node_attributes(self.graph, pore_dict)\n self.add_edge_attributes(self.graph, throat_dict)\n\n self.compute_geometry()", "def to_dict(self):\n d = {}\n for attr in self.__class__.attributes:\n d[attr] = getattr(self, attr)\n return d", "def to_dictionary(self):\n return {\n 'id': self.id,\n 'size': self.width,\n 'x': self.x,\n 'y': self.y\n }", "def dict(cls):\n if cls._dict is None:\n cls._dict = {attr: getattr(cls, attr) for attr in cls.keys()}\n return cls._dict", "def attributes(self):\n raise NotImplementedError", "def to_dictionary(self):\n return ({\"id\": self.id, \"width\": self.width, \"height\": self.height,\n \"x\": self.x, \"y\": self.y})", "def attrs(self):\n return self.__dict__", "def add_attributes(self, x):\n for k, v in x.items():\n setattr(self, k, v)", "def rect(self, x, y, w, h, cls=None, style=None):\n x, y, w, h = self._meta.units(x, y, w, h)\n cls_str = 'class=\"%s\" ' % cls if cls else ''\n style_str = 'style=\"%s\" ' % self._meta.make_style(style) if style else ''\n self.elements.append(\"\"\"\n <rect x=\"%s\" y=\"%s\" width=\"%s\" height=\"%s\" %s%s/>\n \"\"\".strip() % (\n x, y, w, h, cls_str, style_str\n ))", "def rect(self, x, y, w, h, cls=None, style=None):\n x, y, w, h = self._meta.units(x, y, w, h)\n cls_str = 'class=\"%s\" ' % cls if cls else ''\n style_str = 'style=\"%s\" ' % self._meta.make_style(style) if style else ''\n self.elements.append(\"\"\"\n <rect x=\"%s\" y=\"%s\" width=\"%s\" height=\"%s\" %s%s/>\n \"\"\".strip() % (\n x, y, w, h, cls_str, style_str\n ))", "def __init__(self, **attributes):\n for key, value in attributes.items():\n setattr(self, key, value)", "def load_attrs(self):\n return loads(self.get_attr().GetObject()) or {}", "def attributes(self):\n _attrs = []\n if self.name:\n _attrs.append(\"name\")\n if self.label:\n _attrs.append(\"label\")\n if self.confidence:\n _attrs.append(\"confidence\")\n if self.index:\n _attrs.append(\"index\")\n if self.attrs:\n _attrs.append(\"attrs\")\n return _attrs + [\"points\"]", "def to_dictionary(self):\n return {'id': self.id, 'size': self.width, 'x': self.x, 'y': self.y}", "def attribute_dict(self):\n return self.__attribute_dict", "def _build_attributes(self):\n\n # We might rebuild the program because of snippets but we must\n # keep already bound attributes\n\n dtype = []\n for (name,gtype) in self.all_attributes:\n if name not in self._attributes.keys():\n attribute = Attribute(self, name, gtype)\n else:\n attribute = self._attributes[name]\n\n self._attributes[name] = attribute\n dtype.append(attribute.dtype)", "def to_dictionary(self):\n s = self\n return {'id': s.id, 'x': s.x, 'size': s.width, 'y': s.y}", "def _attrs_map(self) -> \"dict[int, str]\":\n return {i: attr.name for i, attr in enumerate(self._attrs())}", "def dict(self) -> dict():\n\n dict_reg_hive = {}\n\n for _attribute in self.attributes.__dict__.items():\n if isinstance(_attribute[1], str):\n if not True in [_attribute[1].startswith(prefix) for prefix in ['<', 'providers.', 
'None']]:\n _attribute_value = getattr(self, _attribute[1])\n dict_reg_hive.update({_attribute[1]: _attribute_value})\n\n return dict_reg_hive", "def to_dictionary(self):\n\n return {\n \"id\": self.id,\n \"width\": self.width,\n \"height\": self.height,\n \"x\": self.x,\n \"y\": self.y\n }", "def to_dict(self) -> Dict:\n _dict = {}\n if hasattr(self, 'attributes') and self.attributes is not None:\n _dict['attributes'] = [x.to_dict() for x in self.attributes]\n return _dict", "def test_update_kwargs(self):\n r = Rectangle(5, 2)\n d = r.__dict__.copy()\n\n r.update(id=10)\n d[\"id\"] = 10\n self.assertEqual(r.__dict__, d)\n\n r.update(width=5)\n d[\"_Rectangle__width\"] = 5\n self.assertEqual(r.__dict__, d)\n\n r.update(height=17)\n d[\"_Rectangle__height\"] = 17\n self.assertEqual(r.__dict__, d)\n\n r.update(x=20)\n d[\"_Rectangle__x\"] = 20\n self.assertEqual(r.__dict__, d)\n\n r.update(y=25)\n d[\"_Rectangle__y\"] = 25\n self.assertEqual(r.__dict__, d)", "def GetAttributes(self):\n return dict(self._attrs)", "def test_hasattrs(self):\n self.assertTrue(hasattr(self.obj, \"id\"), \"created obj doesn't \" +\n \"have the attribute id.\")\n self.assertTrue(hasattr(self.obj, \"_Rectangle__width\"), \"created \" +\n \"obj doesn't have the attribute width.\")\n self.assertTrue(hasattr(self.obj, \"_Rectangle__height\"), \"created \" +\n \"obj have the attribute height.\")\n self.assertTrue(hasattr(self.obj, \"_Rectangle__x\"), \"created obj \" +\n \"doesn't have the attribute x.\")\n self.assertTrue(hasattr(self.obj, \"_Rectangle__y\"), \"created \" +\n \"obj doesn't have the attribute y.\")", "def add_attributes_from_dict(self, dict):\n for key in dict:\n val = dict[key]\n if hasattr(self, key):\n setattr(self, key, val)", "def get_dic(self):\n dic = {\n 'size': self.size,\n 'bounds': self.bounds,\n 'visible': self.visible,\n 'is_static': self.is_static,\n 'options': self.options,\n 'primitive_type': self.primitive_type,\n 'constrain_ratio': self.constrain_ratio,\n 'constrain_navigation': self.constrain_navigation,\n 'framebuffer': self.framebuffer,\n # 'beforeclear': self.beforeclear,\n 'variables': self.get_variables_list(),\n 'vertex_shader': self.vertex_shader,\n 'fragment_shader': self.fragment_shader,\n }\n return dic", "def initSlotObjectDict(cls):\n restslotattributedict.update(dict({extension_tunnel: \"name\"}))\n restslotattributedict.update(dict({extension_circuit: \"name\"}))\n restslotattributedict.update(dict({extension_ip_interface: \"name\"}))\n restslotattributedict.update(dict({extension_ip_route: \"name\"}))\n restslotattributedict.update(dict({gigabitethernet: \"name\"}))\n restslotattributedict.update(dict({blade: \"slot_number\"}))", "def getAttributes(self):\n pass", "def to_dictionary(self):\n new_dict = {}\n new_dict['id'] = self.id\n new_dict['size'] = self.size\n new_dict['x'] = self.x\n new_dict['y'] = self.y\n return new_dict", "def get_attributes(cls):\r\n return [Attribute('size', '20'),\r\n Attribute('label', ''), ]", "def get_class_attributes(cls) -> Dict[str, Tuple[Any, str]]:\n try:\n source = inspect.getsource(cls.__init__) or \"\"\n if not source:\n return {}\n except TypeError:\n return {}\n source = utils.join(source.split(\"\\n\"))\n node = ast.parse(source)\n\n attr_list: List[Tuple] = []\n module = importlib.import_module(cls.__module__)\n globals = dict(inspect.getmembers(module))\n for x in ast.walk(node):\n if isinstance(x, _ast.AnnAssign):\n attr, lineno, type_str = parse_annotation_assign(x)\n type = eval(type_str, globals)\n attr_list.append((attr, 
lineno, type))\n if isinstance(x, _ast.Attribute) and isinstance(x.ctx, _ast.Store):\n attr_list.append(parse_attribute_with_lineno(x))\n attr_list = sorted(attr_list, key=lambda x: x[1])\n\n attrs: Dict[str, Tuple[Any, str]] = {}\n lines = source.split(\"\\n\")\n for name, lineno, *type in attr_list:\n if name.startswith(\"self.\"):\n name = name[5:]\n desc = get_description(lines, lineno)\n if type:\n attrs[name] = type[0], desc # Assignment with type annotation wins.\n elif name not in attrs:\n attrs[name] = None, desc\n return attrs", "def get_attributes(self, shape):\n attributes = {}\n identifier_names = [i.name for i in self.identifiers]\n\n for name, member in shape.members.items():\n snake_cased = xform_name(name)\n if snake_cased in identifier_names:\n # Skip identifiers, these are set through other means\n continue\n snake_cased = self._get_name(\n 'attribute', snake_cased, snake_case=False\n )\n attributes[snake_cased] = (name, member)\n\n return attributes", "def test_attr_dict(self):\n obj = awstats_reader.AttrDict([('this','that'), ('thus','those')])\n self.assertEqual(obj.thus, 'those')", "def to_dictionary(self):\n list_dic = {}\n list_dic['id'] = self.id\n list_dic['width'] = self.__width\n list_dic['height'] = self.__height\n list_dic['x'] = self.__x\n list_dic['y'] = self.__y\n return (list_dic)", "def dictOfDraws(self):\n return {self.name: self.drawType}" ]
[ "0.7408595", "0.72013086", "0.68213564", "0.67093587", "0.6664881", "0.66571337", "0.6597422", "0.6558994", "0.64552194", "0.6355584", "0.6302567", "0.6284163", "0.62188774", "0.61945736", "0.60779554", "0.6066608", "0.60477173", "0.6038226", "0.60199076", "0.60147053", "0.6009421", "0.6004594", "0.59905785", "0.59706867", "0.59566575", "0.59429157", "0.59174204", "0.59095067", "0.59058684", "0.5867348", "0.58625", "0.5847483", "0.58330876", "0.5819669", "0.58192843", "0.58187443", "0.5812518", "0.5802248", "0.57984227", "0.57958424", "0.5793255", "0.5792497", "0.5767644", "0.5765075", "0.57275236", "0.5665942", "0.56654584", "0.5660683", "0.56535053", "0.5638646", "0.56303495", "0.561938", "0.56011593", "0.56008124", "0.5583373", "0.557825", "0.55695033", "0.5557652", "0.5550923", "0.5545344", "0.5545344", "0.5543113", "0.5543113", "0.5543113", "0.5532016", "0.55263656", "0.5525096", "0.552431", "0.55232036", "0.5518331", "0.5517971", "0.5515708", "0.5505697", "0.5501624", "0.5501624", "0.5499741", "0.5499602", "0.5496143", "0.54918206", "0.5486845", "0.54746944", "0.54725367", "0.54715985", "0.547059", "0.5463549", "0.54586214", "0.54476064", "0.54474354", "0.5446329", "0.5443876", "0.54393405", "0.542535", "0.5416544", "0.5410787", "0.5406234", "0.5401589", "0.54004514", "0.5395976", "0.53952926", "0.539174" ]
0.56820136
45
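The positive document in this record is a `to_dictionary` method on a Rectangle-like class. A runnable version with a usage line is sketched below; the constructor is an assumed minimal body, since only the method itself appears in the record.

```python
class Rectangle:
    def __init__(self, id, width, height, x=0, y=0):
        self.id, self.width, self.height, self.x, self.y = id, width, height, x, y

    def to_dictionary(self):
        # Collect the public attributes into a plain dict, as in the record above.
        return {
            'id': self.id,
            'width': self.width,
            'height': self.height,
            'x': self.x,
            'y': self.y,
        }


print(Rectangle(1, 3, 4).to_dictionary())
# {'id': 1, 'width': 3, 'height': 4, 'x': 0, 'y': 0}
```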
Test that the clear method works for NT-based systems
def test_clear_windows(self): with mock.patch("hangman.cli.screen.os.system") as mock_system: hangman.cli.screen.Screen.clear() mock_system.assert_called_with("cls")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear():\r\n if name == 'nt':\r\n _ = system('cls')\r\n else:\r\n _ = system('clear')", "def clear(): \n if os.name == \"nt\":\n os.system(\"cls\")\n else:\n os.system(\"clear\")", "def clear():\n if os.name == 'nt': \n os.system('cls') \n else: \n os.system('clear')", "def clear():\n\n # windows \n if os.name == \"nt\": \n _ = os.system(\"cls\") \n # mac and linux\n else: \n _ = os.system(\"clear\")", "def test_clear_posix(self):\n with mock.patch(\"hangman.cli.screen.os.system\") as mock_system:\n hangman.cli.screen.Screen.clear()\n mock_system.assert_called_with(\"clear\")", "def clear():\n if \"Windows\" in system():\n call(\"cls\")\n else:\n call(\"clear\")", "def clear():\n if platform.system() == \"Windows\":\n os.system('cls')\n elif platform.system() == \"Linux\":\n os.system('clear')", "def test_clear(self):\n self.assertTrue(self.ec.clear())", "def clear():\n\n os.system(\"clear\")", "def clear():", "def clear() -> None:\n\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear() -> None:\n\n os.system('cls' if os.name == 'nt' else 'clear')", "def clean():\n if system() == 'Windows':\n os.system('cls')\n else:\n os.system('clear')", "def clear():\r\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear():\n os.system('cls' if os.name == 'nt' else 'clear')", "def reset():\n if os.name == \"posix\": #In linux\n os.system(\"clear\")\n elif os.name == (\"ce\", \"nt\", \"dos\"): #In windows\n os.system(\"cls\")", "def do_clear(self, arg):\r\n if platform.system == \"Windows\":\r\n os.system(\"cls\")\r\n else:\r\n os.system(\"clear\")", "def cleanup(self):\n\n self.PLC['1'].set_plc_mode(0)\n self.PLC['1'].plc_clear('all')\n super(Test200SmartSanityClear005, self).cleanup()", "def test_verify_clear(self):\n self._verify([self.applied_commands['clear']])", "def test_005_clear(self):\n HEADING()\n db = self.db\n db.connect()\n\n db.clear()\n\n # assert not os.path.isfile(path_expand(\"~/.cloudmesh/pbs/pbs.db\"))\n assert(len(db) == 0)", "def clear():\n sub.call('cls', shell=True)", "def do_clear(self, line):\n\t if os.name == 'nt':\n\t os.system('cls')\n\t else:\n\t os.system('clear')", "def clear_screen():\n if name == \"nt\":\n system('cls')\n else:\n system('clear')", "def clear(self) -> None:", "def clearscreen():\n if os.name == 'nt':\n os.system('cls')\n elif os.name == 'posix':\n os.system('clear')\n else:\n print \"Untested OS. 
Please tell the developer you're on: %s\" % os.name \n sys.exit(0)", "def clear_dtc(self):\n self.send_command(CLEAR_DTC_COMMAND) \n r = self.get_result()\n return r", "def clear(self) -> None:\n pass", "def _clear_screen():\n if os.name == 'nt':\n os.system('cls')\n else:\n os.system('clear')", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def Clear(self) -> None:", "def clear_screen():\n if os.name == 'nt':\n os.system('cls')\n else:\n os.system('clear')", "def clear_screen():\n if os.name == 'nt':\n os.system(\"cls\")\n else:\n os.system(\"clear\")", "def do_clear(self, args):\n if (len(args.split()) > 0):\n self.__bad_arguments(\"clear\")\n else:\n os.system('clear')", "def clear(self) -> None:\n ...", "def clear():\n try:\n try:\n # For Macs and Linux\n os.system('clear');\n except:\n # For Windows REPORTED BUG: Sometimes does not work on 64 bit Windows\n os.system('cls');\n except:\n # If nothing else works, a hacky, non optimal solution\n for i in range(50): print(\"\")", "def clear(self):\n ...", "def test_clear(self):\n pkg = make_package()\n key = self.db.redis_key(pkg.filename)\n self.redis[key] = \"foobar\"\n self.db.clear(pkg)\n val = self.redis.get(key)\n self.assertIsNone(val)\n count = self.redis.scard(self.db.redis_set)\n self.assertEqual(count, 0)", "def clear(self):\n pass", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if system_name().lower()==\"windows\" else \"clear\"\n\n # Action\n system_call(command)", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if system_name().lower()==\"windows\" else \"clear\"\n\n # Action\n system_call(command)", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if system_name().lower()==\"windows\" else \"clear\"\n\n # Action\n system_call(command)", "def clearTerminal():\r\n os.system('cls' if os.name == 'nt' else 'clear')", "def test_install_clear(self):\n # TODO: clearing a node doesn't seem to work with the XML lens\n #\n # % augtool -b\n # augtool> set /augeas/load/Xml/incl[3] \"/tmp/test.xml\"\n # augtool> load\n # augtool> clear '/files/tmp/test.xml/Test/Text/#text'\n # augtool> save\n # error: Failed to execute command\n # saving failed (run 'print /augeas//error' for details)\n # augtool> print /augeas//error\n #\n # The error isn't useful.\n pass", "def Clear(self):\n pass", "def cls(self):\n os.system('clear')", "async def clear(self):", "async def clear_all(self) -> None:", "def clearConsole():\r\n\r\n command = 'clear' # command for console clearing\r\n if os.name in ('nt', 'dos'): # if the machine is running on Windows, then use cls\r\n command = 'cls'\r\n os.system(command) # othen than Windows, use clear\r", "def hard_reset() -> NoReturn:", "def clear(self): # real signature unknown; restored from __doc__\n pass", "def test_clear(self):\n from supvisors.statistics import StatisticsInstance\n instance = StatisticsInstance(17, 10)\n # change values\n instance.counter = 28\n instance.ref_stats = ('dummy', 0)\n instance.cpu = [13.2, 14.8]\n instance.mem = [56.4, 71.3, 68.9]\n instance.io = {'eth0': (123465, 654321), 'lo': (321, 321)}\n instance.proc = {('myself', 5888): (25.0, 12.5)}\n # check clearance\n instance.clear()\n self.assertEqual(3, instance.period)\n self.assertEqual(10, instance.depth)\n self.assertEqual(-1, instance.counter)\n self.assertIsNone(instance.ref_stats)\n self.assertIs(list, type(instance.cpu))\n 
self.assertFalse(instance.cpu)\n self.assertIs(list, type(instance.mem))\n self.assertFalse(instance.mem)\n self.assertIs(dict, type(instance.io))\n self.assertFalse(instance.io)\n self.assertIs(dict, type(instance.proc))\n self.assertFalse(instance.proc)", "def _doResetMemory(self):\n self._cmdClearMemory()\n time.sleep(1)\n self._cmdResetParameters()\n time.sleep(1)", "def test_deallocate_virt_realm(self):\n pass", "def clear_terminal(self):\n os.system('clear')", "def screen_clear():\n from subprocess import call\n import os\n call('clear' if os.name == 'posix' else 'cls')", "def test_destroy_nas_share(self):\n pass", "def clear_storage(self):\r\n raise NotImplementedError('override me')", "def clear(self):\n self.call('clear')", "def clear_datastore():\n local('lib/remote_api_shell.py tweetlocker -p /_/shell -c '\n '\"from lib.utils import clear_datastore; clear_datastore()\"',\n capture=False)", "def soft_reset():", "def clear_screen() -> None:\n os.system(\"cls\" if os.name == \"nt\" else \"clear\")", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if platform.system().lower()==\"windows\" else \"clear\"\n\n # Action\n return subprocess.call(command) == 0", "def clear_screen():\n os.system(\"cls\" if os.name == 'nt' else 'clear')", "def clear_screen():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear_screen():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear(self):\n self.cmd(0x33) # $33 8-bit mode\n self.cmd(0x32) # $32 8-bit mode\n self.cmd(0x28) # $28 8-bit mode\n self.cmd(0x0C) # $0C 8-bit mode\n self.cmd(0x06) # $06 8-bit mode\n self.cmd(0x01) # $01 8-bit mode", "def test_clear_cache(self):\n api_helpers.clear_cache()", "def test_clear(single_bucket): # pylint: disable=redefined-outer-name\n single_bucket.clear()\n\n assert single_bucket.is_empty() is True", "def clear(self):\n pass", "def clear(self):\n pass", "def clear(self):\n pass", "def clear_path(self):\n self.write(CLEAR + END_COMMAND)", "def full_reset(self):\n self.at_cmd('CFUN=1')", "def clear(self, cacheDir):", "def clear_screen(self):\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear_screen(self):\n os.system('cls' if os.name == 'nt' else 'clear')", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def Clear(self): # real signature unknown; restored from __doc__\n pass", "def hard_reset(self) -> None:\n os.system('rm -fr \"$HOME/.daf/\"')", "def reset():", "def reset():", "def reset():", "def test_clear_cache_silent_fail():\n shutil.rmtree(yvs.cache.LOCAL_CACHE_DIR_PATH)\n yvs.main()\n case.assertFalse(\n os.path.exists(yvs.cache.LOCAL_CACHE_DIR_PATH),\n 'local cache directory exists')", "def clear(self):\r\n if self.fs_type == 'FAT':\r\n for file_entry in self.metadata.get_files():\r\n file_metadata = file_entry['metadata']\r\n file_metadata = FATAllocatorMeta(file_metadata)\r\n self.fs.clear(file_metadata)\r\n elif self.fs_type == 'NTFS':\r\n for file_entry in self.metadata.get_files():\r\n 
file_metadata = file_entry['metadata']\r\n file_metadata = NTFSAllocatorMeta(file_metadata)\r\n self.fs.clear(file_metadata)\r\n else:\r\n raise NotImplementedError()", "def clear(self):\n self.initialize()\n self.device_disconnect()", "def clear_console():\n os.system('cls' if os.name == 'nt' else \"clear\")" ]
[ "0.7493977", "0.7483117", "0.7462642", "0.7444495", "0.7319081", "0.72657627", "0.7236104", "0.7170008", "0.7141984", "0.69726455", "0.6926334", "0.6926334", "0.6907976", "0.69050306", "0.6864983", "0.6864983", "0.6850608", "0.6749582", "0.67168456", "0.6636215", "0.65972525", "0.6584971", "0.6552358", "0.65268767", "0.65266365", "0.651219", "0.64875495", "0.6480382", "0.6410371", "0.6392413", "0.6392413", "0.6392413", "0.6392413", "0.6392413", "0.6392413", "0.6392413", "0.63759226", "0.6375251", "0.6371682", "0.6345436", "0.63445395", "0.6337489", "0.6337467", "0.632015", "0.62718874", "0.6266004", "0.6266004", "0.6266004", "0.62646914", "0.6262868", "0.62544376", "0.6224928", "0.619339", "0.6164782", "0.6138697", "0.6110238", "0.6096481", "0.6096189", "0.60940677", "0.60832155", "0.6058359", "0.6047616", "0.6047445", "0.60397744", "0.603874", "0.60346776", "0.6030824", "0.6025344", "0.6000028", "0.5997939", "0.5991723", "0.5991723", "0.59775925", "0.5966033", "0.5961285", "0.5957571", "0.5957571", "0.5957571", "0.59330255", "0.59253246", "0.5923861", "0.59214026", "0.59214026", "0.59204054", "0.59204054", "0.59204054", "0.59204054", "0.59204054", "0.59204054", "0.59204054", "0.59204054", "0.59204054", "0.58750093", "0.58739364", "0.58739364", "0.58739364", "0.58651924", "0.5854878", "0.5845574", "0.583989" ]
0.6814909
17
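The positive document here patches `os.system` so the test can assert on the command without actually clearing a terminal. A self-contained equivalent is sketched below; `hangman.cli.screen` is specific to that record, so this sketch substitutes a hypothetical local `clear()` helper and additionally patches `os.name` to force the Windows branch.

```python
import os
import unittest
from unittest import mock


def clear():
    # Dispatch on platform: 'cls' on Windows (os.name == 'nt'), 'clear' elsewhere.
    os.system('cls' if os.name == 'nt' else 'clear')


class TestClearWindows(unittest.TestCase):
    @mock.patch('os.system')
    @mock.patch('os.name', 'nt')  # patch with a plain value: no mock arg injected
    def test_clear_windows(self, mock_system):
        clear()
        mock_system.assert_called_once_with('cls')


if __name__ == '__main__':
    unittest.main()
```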
Test that the clear method works for POSIX-based systems
def test_clear_posix(self): with mock.patch("hangman.cli.screen.os.system") as mock_system: hangman.cli.screen.Screen.clear() mock_system.assert_called_with("clear")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear():\n\n # windows \n if os.name == \"nt\": \n _ = os.system(\"cls\") \n # mac and linux\n else: \n _ = os.system(\"clear\")", "def clear(): \n if os.name == \"nt\":\n os.system(\"cls\")\n else:\n os.system(\"clear\")", "def clear():\r\n if name == 'nt':\r\n _ = system('cls')\r\n else:\r\n _ = system('clear')", "def clear():\n if os.name == 'nt': \n os.system('cls') \n else: \n os.system('clear')", "def clear():\n if platform.system() == \"Windows\":\n os.system('cls')\n elif platform.system() == \"Linux\":\n os.system('clear')", "def clear():\n\n os.system(\"clear\")", "def clear():\n if \"Windows\" in system():\n call(\"cls\")\n else:\n call(\"clear\")", "def reset():\n if os.name == \"posix\": #In linux\n os.system(\"clear\")\n elif os.name == (\"ce\", \"nt\", \"dos\"): #In windows\n os.system(\"cls\")", "def clear() -> None:\n\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear() -> None:\n\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear():\r\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clean():\n if system() == 'Windows':\n os.system('cls')\n else:\n os.system('clear')", "def do_clear(self, line):\n\t if os.name == 'nt':\n\t os.system('cls')\n\t else:\n\t os.system('clear')", "def clearscreen():\n if os.name == 'nt':\n os.system('cls')\n elif os.name == 'posix':\n os.system('clear')\n else:\n print \"Untested OS. Please tell the developer you're on: %s\" % os.name \n sys.exit(0)", "def do_clear(self, arg):\r\n if platform.system == \"Windows\":\r\n os.system(\"cls\")\r\n else:\r\n os.system(\"clear\")", "def clear():", "def test_clear_windows(self):\n with mock.patch(\"hangman.cli.screen.os.system\") as mock_system:\n hangman.cli.screen.Screen.clear()\n mock_system.assert_called_with(\"cls\")", "def clear_screen():\n if name == \"nt\":\n system('cls')\n else:\n system('clear')", "def clear():\n try:\n try:\n # For Macs and Linux\n os.system('clear');\n except:\n # For Windows REPORTED BUG: Sometimes does not work on 64 bit Windows\n os.system('cls');\n except:\n # If nothing else works, a hacky, non optimal solution\n for i in range(50): print(\"\")", "def clear():\n sub.call('cls', shell=True)", "def clear_screen():\n if os.name == 'nt':\n os.system(\"cls\")\n else:\n os.system(\"clear\")", "def clear_screen():\n if os.name == 'nt':\n os.system('cls')\n else:\n os.system('clear')", "def _clear_screen():\n if os.name == 'nt':\n os.system('cls')\n else:\n os.system('clear')", "def clearTerminal():\r\n os.system('cls' if os.name == 'nt' else 'clear')", "def do_clear(self, args):\n if (len(args.split()) > 0):\n self.__bad_arguments(\"clear\")\n else:\n os.system('clear')", "def screen_clear():\n from subprocess import call\n import os\n call('clear' if os.name == 'posix' else 'cls')", "def test_clear(self):\n self.assertTrue(self.ec.clear())", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if system_name().lower()==\"windows\" else \"clear\"\n\n # Action\n system_call(command)", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if system_name().lower()==\"windows\" else \"clear\"\n\n # Action\n system_call(command)", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if system_name().lower()==\"windows\" else \"clear\"\n\n # Action\n system_call(command)", "def clear_terminal(self):\n os.system('clear')", "def 
clearConsole():\r\n\r\n command = 'clear' # command for console clearing\r\n if os.name in ('nt', 'dos'): # if the machine is running on Windows, then use cls\r\n command = 'cls'\r\n os.system(command) # othen than Windows, use clear\r", "def clear_screen():\r\n if os.name in ('nt','dos'):\r\n os.system(\"cls\")\r\n elif os.name in ('linux','osx','posix'):\r\n os.system(\"clear\")\r\n else:\r\n print(\"\\n\") * 120", "def command_clearterm():\n subprocess.call(\"reset\")", "def clear_screen() -> None:\n os.system(\"cls\" if os.name == \"nt\" else \"clear\")", "def clear_screen():\n\n # Clear command as function of OS\n command = \"cls\" if platform.system().lower()==\"windows\" else \"clear\"\n\n # Action\n return subprocess.call(command) == 0", "def clear(self) -> None:", "def clear_screen():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear_screen():\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear_screen():\n os.system(\"cls\" if os.name == 'nt' else 'clear')", "def clear(self) -> None:\n pass", "def cls(self):\n os.system('clear')", "def clear_console():\n import os\n clear = lambda: os.system('cls')\n clear()\n return None", "def clear_path(self):\n self.write(CLEAR + END_COMMAND)", "def clear_console():\n os.system('cls' if os.name == 'nt' else \"clear\")", "def clear_console():\n os.system('cls' if os.name == 'nt' else 'clear')", "def hard_reset(self) -> None:\n os.system('rm -fr \"$HOME/.daf/\"')", "def clear():\n\n if not CLEAR_PRINT[0]:\n try:\n if os.name == \"nt\":\n # For windows.\n os.system(\"cls\")\n\n elif os.name == \"posix\":\n # For mac/linux.\n os.system(\"clear\")\n\n else:\n # Unknown operating system, just print a newline a bunch of times.\n print(\"\\n\" * CLEAR_PRINT[1])\n\n except:\n # Can't figure out the operating system, safest bet is to just print a newline a bunch of times.\n print(\"\\n\" * CLEAR_PRINT[1])\n\n else:\n # The clearing of screen is overriden, so we just print a newline CLEAR_PRINT[1] times.\n print(\"\\n\" * CLEAR_PRINT[1])", "def test_verify_clear(self):\n self._verify([self.applied_commands['clear']])", "def clear_screen(self):\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear_screen(self):\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def clear(self):", "def Clear(self) -> None:", "def hard_reset() -> NoReturn:", "def clear(self) -> None:\n ...", "def cleanup(self):\n\n self.PLC['1'].set_plc_mode(0)\n self.PLC['1'].plc_clear('all')\n super(Test200SmartSanityClear005, self).cleanup()", "def clear(self):\n ...", "def clear():\n sys.stdout.write('\\033[2J')\n sys.stdout.write('\\033[H')\n sys.stdout.flush()", "def clear(self):\n pass", "def soft_reset():", "def clear():\n clear_output()", "def _do_clear(self):\n print()\n print()\n console.rule()\n os.system(\"cls\" if os.name in (\"nt\", \"dos\") else \"clear\")\n self.history_manager.remove_items(n=1)", "async def clear_all(self) -> None:", "def test_005_clear(self):\n HEADING()\n db = self.db\n db.connect()\n\n db.clear()\n\n # assert not os.path.isfile(path_expand(\"~/.cloudmesh/pbs/pbs.db\"))\n assert(len(db) == 0)", "def wipe(self):", "def wipe(self):", "def cls():\n # TODO: Check if this covers all systems\n os.system('cls' if os.name == 'nt' else 'clear')", "def clear_screen():\n os.system('cls')", "async def clear(self):", "def test_clear(self):\n pkg = make_package()\n key = self.db.redis_key(pkg.filename)\n 
self.redis[key] = \"foobar\"\n self.db.clear(pkg)\n val = self.redis.get(key)\n self.assertIsNone(val)\n count = self.redis.scard(self.db.redis_set)\n self.assertEqual(count, 0)", "def check_clear_flags(self):\n self._command(self.commands[\"CLEAR_ERROR_FLAGS\"])\n self._command(self.commands[\"CLEAR_REBOOTED_FLAG\"])", "def Clear(self):\n pass", "def wipe(self):\n self.console.clear()", "def resetTerminal():\n sys.stdout.write('\\n\\n') # add a few blank lines\n sys.stdout.flush()\n if os.name == 'nt':\n os.system('cls')\n else:\n os.system('clear')", "def clear_dtc(self):\n self.send_command(CLEAR_DTC_COMMAND) \n r = self.get_result()\n return r", "def clear(self):\n self.cmd(0x33) # $33 8-bit mode\n self.cmd(0x32) # $32 8-bit mode\n self.cmd(0x28) # $28 8-bit mode\n self.cmd(0x0C) # $0C 8-bit mode\n self.cmd(0x06) # $06 8-bit mode\n self.cmd(0x01) # $01 8-bit mode", "def clear(self):\n self.call('clear')", "def clearScreen():\n pass", "def clear(self): # real signature unknown; restored from __doc__\n pass", "def clear():\n # TODO: this should actually create a stack of output so I can test each screen\n lines.clear()", "def clear(self, cacheDir):", "def reset():", "def reset():", "def reset():", "def clear_sessions():\n call_command(\"clearsessions\")", "def do_clear(self):\n return self._do_clear()", "def test_07_erase(self, mock_shred, mock_unlink,\n mock_config, mock_verks):\n self._init()\n udocker.Config = mock_config\n udocker.Config.tmpdir = \"/tmp\"\n kstore = udocker.KeyStore(\"filename\")\n self.assertTrue(kstore.erase())\n mock_unlink.assert_called_once_with(\"filename\")", "def clearContactsFromPhone():\n\tprint \"Deleting any contacts from phone...\"\n\tcmd =r\"adb shell pm clear com.android.providers.contacts\"\n\tos.system(cmd)\n\tprint \"Finished deleting contacts from phone.\"", "def test_install_clear(self):\n # TODO: clearing a node doesn't seem to work with the XML lens\n #\n # % augtool -b\n # augtool> set /augeas/load/Xml/incl[3] \"/tmp/test.xml\"\n # augtool> load\n # augtool> clear '/files/tmp/test.xml/Test/Text/#text'\n # augtool> save\n # error: Failed to execute command\n # saving failed (run 'print /augeas//error' for details)\n # augtool> print /augeas//error\n #\n # The error isn't useful.\n pass", "def console_clear(wait_time):\n\n sleep(wait_time) # Produces a delay based on input passed through console_clear()\n\n # These commands only work in the terminal\n try:\n system(\"cls\") # Clears console for users on Windows operating system\n\n except:\n system(\"clear\") # Clears console for users on Mac and Linux operating systems", "def clear(self):\r\n if self.fs_type == 'FAT':\r\n for file_entry in self.metadata.get_files():\r\n file_metadata = file_entry['metadata']\r\n file_metadata = FATAllocatorMeta(file_metadata)\r\n self.fs.clear(file_metadata)\r\n elif self.fs_type == 'NTFS':\r\n for file_entry in self.metadata.get_files():\r\n file_metadata = file_entry['metadata']\r\n file_metadata = NTFSAllocatorMeta(file_metadata)\r\n self.fs.clear(file_metadata)\r\n else:\r\n raise NotImplementedError()", "def clear(self):\n yield from self.command('clear')\n return True" ]
[ "0.7761506", "0.7746805", "0.7741762", "0.772958", "0.7596242", "0.74570656", "0.7336103", "0.7284548", "0.72207433", "0.72207433", "0.71165365", "0.7097402", "0.7097402", "0.70822984", "0.6909185", "0.68754077", "0.6858489", "0.6781232", "0.6764691", "0.6755633", "0.67161864", "0.6657783", "0.6607532", "0.6605387", "0.6574682", "0.6562531", "0.65448666", "0.6538169", "0.64280653", "0.6423562", "0.6423562", "0.6423562", "0.64147186", "0.64021635", "0.6331868", "0.63302624", "0.62976366", "0.6290976", "0.6233927", "0.6231732", "0.6231732", "0.62286365", "0.6205468", "0.62040144", "0.62022376", "0.61817956", "0.6177993", "0.61528873", "0.61147636", "0.6072072", "0.60658866", "0.6052718", "0.6052718", "0.60249156", "0.60249156", "0.60249156", "0.60249156", "0.60249156", "0.60249156", "0.60249156", "0.60103047", "0.5991357", "0.59805685", "0.596574", "0.59460455", "0.5943338", "0.59308183", "0.5907436", "0.5906773", "0.5897454", "0.5887811", "0.5884284", "0.5870447", "0.5870447", "0.5867351", "0.583779", "0.5836512", "0.5831621", "0.5822333", "0.5819185", "0.57965696", "0.5790192", "0.5775927", "0.57735455", "0.57726014", "0.5766889", "0.57565993", "0.57529926", "0.574716", "0.5729997", "0.5729997", "0.5729997", "0.5712512", "0.57035536", "0.5692702", "0.56743574", "0.5663656", "0.5663273", "0.56609666", "0.5657608" ]
0.79437184
0
Test to see if the gallows method returns the correct image
def test_gallows_within_bounds(self): with mock.patch("hangman.cli.screen.print") as mock_print: for index in range(len(hangman.cli.screen._GALLOWS)): hangman.cli.screen.Screen.gallows(index) mock_print.assert_called_with(hangman.cli.screen._GALLOWS[index])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_image_display(self):\n\n result = self.client.get(\"/select_image\")\n\n self.assertIn(b\"/static/uploads/girl-glowing-skin-blue-eyes.jpg\", result.data)", "def image(self):\n return self.any_image(-1)", "def test_Image():\n assert Image(cur, \"Simple_Linear\").detect_image() == True\n assert Image(cur, \"Logistic_Linear\").detect_image() == False\n assert Image(cur, \"Simple_Linear\").date == \"2021-04-20\"\n assert Image(cur, \"Breslow-Day_Test\").source == \"Course BIOSTAT703 slide\"", "def test_check_wrong_image(self):\n result = analyzer.check_image_color(\"tests/test_files/non_exists.jpg\")\n self.assertEqual(result, \"Image not found\")", "def test_badge_should_have_image(self):\n\n badge = self.get_sample_badge()\n # It's a string, even though it often looks like a URL\n self.assertIsInstance(badge.image, str)", "def test_read_image(self):\n pass", "def test_is_image(self):\n os.chdir(\"testimages/\")\n self.assertTrue(fileactions.is_image(\"arch_001.jpg\"))\n self.assertFalse(fileactions.is_image(\"not_an_image.jpg\"))", "def test_thresholded_image(self):\n orig_size = self._image.size\n self._api.SetImage(self._image)\n image = self._api.GetThresholdedImage()\n self.assertIsNot(image, None)\n self.assertIsInstance(image, Image.Image)\n self.assertEqual(image.size, orig_size)\n self.assertEqual(self._api.GetThresholdedImageScaleFactor(), 1)", "def ff_correct_image(image):\n pass", "def ff_correct_image(image):\n pass", "def is_valid_image(image):\n if image not in AVAILABLE_IMAGES.keys():\n return False\n\n return True", "def test_patch_image(self):\n pass", "def test_one_image(self, img):\n return self.__image_pipeline(img)", "def brain_has_lead_image(self, brain=None):", "def test_replace_image(self):\n pass", "def has_legacy_image(self):\n pass", "def has_legacy_image(self):\n pass", "def test_get_g():\n\n assert get_g(100, 143, 255) != 100\n assert get_g(100, 143, 255) == 143\n assert get_g(100, 143, 255) != 255", "def getimage(self):", "def testImageHandling(self):\n \n pm = getToolByName(self.portal, 'portal_membership')\n #make sure the person's member portrait isn't defined\n self.failUnless(pm.getPersonalPortrait('abc123').__name__ in ['defaultUser.gif', 'defaultUser.png'])\n \n # Delete the (nonexistant) image, make sure the portrait stays undefined\n self.person.setImage('DELETE_IMAGE')\n self.failUnless(pm.getPersonalPortrait('abc123').__name__ in ['defaultUser.gif', 'defaultUser.png'])\n \n self.person.setImage(TEST_GIF, content_type=\"image/gif\")\n #self.failUnlessEqual(self.person.getImage().data, TEST_GIF)\n # Try to get a 10x10 version of the image\n imageOfSizeTag = self.person.getImageOfSize(10, 10)\n self.failUnlessEqual(imageOfSizeTag, '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"10\" width=\"10\" />')\n self.failUnlessEqual(pm.getPersonalPortrait('abc123').__name__, 'abc123')\n \n # Try to get a scaled-by-ratio image with a width of 100.\n scaledImageTag = self.person.getScaledImageByWidth(100)\n self.failUnlessEqual(scaledImageTag, '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"150\" width=\"100\" />')\n \n # Delete the image, make sure the portrait is deleted as well\n self.person.setImage('DELETE_IMAGE')\n self.failUnless(pm.getPersonalPortrait('abc123').__name__ in ['defaultUser.gif', 'defaultUser.png'])\n \n #self.person.setImage(TEST_JPEG, content_type=\"image/jpeg\")\n 
#self.failUnlessEqual(self.person.getImage().data, TEST_JPEG)\n \n self.person.setImage(TEST_TIFF, content_type=\"image/tiff\")\n #self.failUnlessEqual(self.person.getImage().data, TEST_TIFF)\n # Try to get a 10x10 version of the image\n imageOfSizeTag = self.person.getImageOfSize(10, 10)\n self.failUnlessEqual(imageOfSizeTag, '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"10\" width=\"10\" />')\n \n # Try to get a scaled-by-ratio image with a width of 100.\n # TIFF handling in Plone is broken (probably the fault of PIL), handle the problem nicely.\n scaledImageTag = self.person.getScaledImageByWidth(100)\n self.failUnless(scaledImageTag == '<img src=\"http://nohost/plone/facstaffdirectory/abc123/image\" alt=\"Test Person\" title=\"Test Person\" height=\"150\" width=\"100\" />' or scaledImageTag == '')", "def test_create_image(self):\n pass", "def image_comparison(self):\n for result in self.cards:\n if result.image_status:\n return True\n return False", "def test_empty_img():\n assert detected_boxes[-1] == ground_truth_boxes[-1]", "def test_get_image(self):\n with open(self.subject, \"rb\") as f:\n content = f.read()\n\n image = image_helper.get_image(content)\n\n self.assertEqual(image.size, (800, 450))", "def testMissingImage(self):\n self.assertNotIn('no_image', self.data)", "def images_exist(self):\n pass", "def check(self, grain=50):\r\n opengles.glDisable(GL_SCISSOR_TEST)\r\n self.s_flg = False\r\n opengles.glReadPixels(0, self.y0, self.ix, 1,\r\n GL_RGB, GL_UNSIGNED_BYTE,\r\n ctypes.byref(self.img))\r\n r0 = self.img[0:3]\r\n for i in xrange(0, self.img_sz, self.step):\r\n if self.img[i:(i+3)] != r0:\r\n return True\r\n\r\n return False", "def test_get_image(self):\n\n spine_data_loader = SpineDataLoader(dirpath_data=self.dirpath,\n batch_size=4)\n\n for idx in range(4):\n image = spine_data_loader.get_image(str(idx))\n assert image.shape == (256, 256, 1)\n assert image.min() == 0.0\n assert image.max() == 1.0\n assert image.dtype == 'float64'", "def image_check(kwargs) -> bool:\n\n # Kwarg argument check\n return kwarg_check(\n kwargs=kwargs,\n options=[\n \"min_captured_at\",\n \"max_captured_at\",\n \"radius\",\n \"image_type\",\n \"organization_id\",\n \"fields\",\n ],\n callback=\"image_check\",\n )", "def checkImages(self):\r\n\r\n self.leftImage, self.rightImage, res = self.receiver.getImageData()\r\n\r\n return res", "def check(self, grain=50):\n opengles.glReadPixels(0, 0, self.ix, self.iy,\n GL_RGB, GL_UNSIGNED_BYTE,\n ctypes.byref(self.img))\n r0 = self.img[0:3]\n step = 3 * int(self.ix * self.iy / 50)\n for i in xrange(0, len(self.img)-3, step):\n if self.img[i:(i+3)] != r0:\n return True\n\n return False", "def test_list_image(self):\n pass", "def testimage_handler(self):\n\t\t\n\t\tthings = Thing.objects.all()\n\t\tif len( things ):\n\t\t\tthing = things[0]\n\t\telse:\n\t\t\tc = Client()\n\t\t\tdata = parse_qs( 'title=&tags=&lattitude=32.82248&longitude=-96.762986&duration=&parent=&privacy=U&lifespan=&format=txt' )\n\t\t\tdata[ 'media' ] = open( MEDIA_ROOT + 'unittest_image.jpg' )\n\t\t\tc.post( '/api/place/', data )\n\t\t\t\n\t\t\tthing = Thing.objects.all()[0]\n\n\t\t\n\t\turi = thing.media.replace( 'http://' + DOMAIN, '' )\n\t\t\n\t\tc = Client()\n\t\tresponse = c.get( uri )\n\t\tself.failUnlessEqual(response.status_code, 200)", "def hasImage(self):\n if self.getImage():\n return True\n return False", "def test_image_path(self):\n self.assertEqual(\n self.mineral.image_path,\n 
'minerals/images/some_filename.jpg')", "def test_get_file_image(self):\n image = image_helper.get_file_image(self.subject)\n\n self.assertEqual(image.size, (800, 450))", "def test_smear(self):\n image = self.design.layout.layers[0].images[0]\n assert len(image.smears) == 1", "def is_image(mine=None, file=None):\n if file:\n mine = get_file_mine(file)\n print(mine)\n if mine:\n return mine.find('image') != -1\n\n return False", "def test_rmg_mode(self):\n self.assertEqual(self.rmgmode, False)", "def checkForced(self, source, forced):\n self.assertEqual(source.get(\"flux.naive\"),\n self.image.get(self.x, self.y) if forced else self.image.get(self.xcen, self.ycen))", "def test_blue_image_exists_or_not(self):\n response = self.app.get('/image/blue')\n if len(os.listdir(img_dir)):\n self.assertEqual(response.status_code, 200)\n self.assertIn(b'blue image', response.data)\n else:\n self.assertEqual(response.status_code, 404)", "def test_get_image_id(self):\n self.roses.save_image()\n image_id=Images.get_image_id(self.roses.id)\n self.assertTrue(image_id.id==self.roses.id)", "def test_single_image(self, client):\n step = 1\n plugin_name = PluginNameEnum.IMAGE.value\n train_id = gbl.get_train_ids()[0]\n tag_name = gbl.get_tags(train_id, plugin_name)[0]\n expected_image_tensor = gbl.get_single_image(train_id, tag_name, step)\n\n params = dict(train_id=train_id, tag=tag_name, step=step)\n url = get_url(BASE_URL, params)\n response = client.get(url)\n recv_image_tensor = get_image_tensor_from_bytes(response.data)\n\n assert expected_image_tensor.any() == recv_image_tensor.any()", "def test_aws_service_api_image_get(self):\n pass", "def get_image_url():", "def test_on_skimage_png(self):\n from_skimage = diffread(TEST_PNG)\n\n self.assertTupleEqual(from_skimage.shape, (256, 256))\n self.assertTrue(np.allclose(from_skimage, np.ones_like(from_skimage)))", "def identify_image(im):\n score_cures = np.mean(im[1025:1065, 1130:1180, 0])\n score_ingredients = np.mean(im[1025:1065, 675:720, 0])\n if score_cures < 177.5:\n return 'cures'\n if score_ingredients < 177.5:\n return 'ingredients'\n else:\n return 'other'", "def test_instance(self):\n self.assertTrue(isinstance(self.new_image, Image))", "def testQuestionFour(self):\n self.assertTrue(os.path.exists(\"./mandelbrot.png\"), \"Question 4's output (mandelbrot.png) does not exist.\")", "def hasImg(img_name):\n try:\n Image.objects.raw({\"_id\": img_name}).first()\n return True\n except pymodm_errors.DoesNotExist:\n return False", "def test(cls, pathHolder, parentCrawler):\n if not super(Jpg, cls).test(pathHolder, parentCrawler):\n return False\n\n return pathHolder.ext() == 'jpg'", "def verify(image_path):\n try:\n with Image.open(image_path) as img:\n img.verify()\n return True\n except Exception as e:\n log.warn('Path [{}] does not point to an image: [{}]'.format(image_path, e))\n return False", "def picture(result):\n media = result.entities.get('media')\n if media:\n return media[0].get('type') == u'photo'\n return False", "def test_check_image_color(self):\n result = analyzer.check_image_color(\"tests/test_files/sample.jpg\")\n self.assertEqual(result, \"light\")", "def allowed_image(self, module_id):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\tself.log(\"In allowed_image: \" + module_id,level=logging.DEBUG)\n\t\tcfg = self.cfg\n\t\tif self.build['ignoreimage']:\n\t\t\tself.log(\"ignoreimage == true, returning true\" + module_id,level=logging.DEBUG)\n\t\t\treturn 
True\n\t\tself.log(str(cfg[module_id]['shutit.core.module.allowed_images']),level=logging.DEBUG)\n\t\tif cfg[module_id]['shutit.core.module.allowed_images']:\n\t\t\t# Try allowed images as regexps\n\t\t\tfor regexp in cfg[module_id]['shutit.core.module.allowed_images']:\n\t\t\t\tif not shutit_util.check_regexp(regexp):\n\t\t\t\t\tself.fail('Illegal regexp found in allowed_images: ' + regexp) # pragma: no cover\n\t\t\t\tif re.match('^' + regexp + '$', self.target['docker_image']):\n\t\t\t\t\treturn True\n\t\treturn False", "def check_type(filename):\n try:\n im = Image.read(filename)\n except SanperaError:\n return False\n else:\n return im.original_format in [b'JPEG', b'PNG', b'GIF']", "def check_got_promotion():\n im = region_grabber((550, 250, 815, 320)) # Hardcoded\n pos = imagesearcharea(\"Images/promotion_queen.jpg\", 0, 0, 0, 0, 0.9, im)\n if pos != [-1, -1]:\n print(\"Got promotion\")\n pos_image = [550 + pos[0], 250 + pos[1]]\n click_image(\"Images/promotion_queen.jpg\", pos_image, \"left\", 0.2)\n time.sleep(0.5)\n return True\n return False", "def test_aws_service_api_public_image_get(self):\n pass", "def test_grdimage_fails():\n fig = Figure()\n with pytest.raises(GMTInvalidInput):\n fig.grdimage(np.arange(20).reshape((4, 5)))", "def check_image(image):\n\n if not path.isfile(image):\n raise ImageException('Error: Singularity image \"%s\" not found.' % image)\n return True", "def test_ipython_robot_report_image(self):\n if PLATFORM == \"windows\":\n return\n\n self.activate_magic()\n\n with patch(\"jupyter_kernel_test.validate_message\", fake_validate):\n reply, outputs = self.execute_helper(code=MAGIC_IMAGE_TASK, timeout=60)\n assert reply[\"content\"][\"status\"] == \"ok\"\n assert any(\"image/png\" in output[\"content\"][\"data\"] for output in outputs)", "def getImage(cam):\n\n return cam.getImage()", "def get_sim_images(self, urdf_file, camera_pose_path):\n self.load_urdf(urdf_file, random_pose=False)\n # self.get_plane()\n # self.change_texture(self.plane_id)\n # self.change_texture(self.object_id)\n\n self.create_camera()\n self.from_camera_pose(camera_pose_path)\n self.step(1)\n\n self.get_bgr()\n self.get_seg()\n\n if self.get_object_mask(self.object_id) is None:\n return False\n\n self.get_object_depth()\n self.crop(padding=10, random=False)\n\n print('sim img')\n\n return self.bgr, self.depth", "def test_create_image_signature(self):\n pass", "def _check_consistency_between_imaging_extractors(self):\n return True", "def test_irobotframework_report_image(self):\n if PLATFORM == \"windows\":\n return\n\n with patch(\"jupyter_kernel_test.validate_message\", fake_validate):\n reply, outputs = self.execute_helper(code=IMAGE_TASK, timeout=60)\n assert reply[\"content\"][\"status\"] == \"ok\"\n assert any(\"image/png\" in output[\"content\"][\"data\"] for output in outputs)", "def test_add_to_scanner_image():\n image = Image(os.path.join(os.path.dirname(__file__), \"scanner_without_app1.jpg\"))\n assert not image.has_exif\n image.gps_latitude = (41.0, 29.0, 57.48)\n image.gps_latitude_ref = \"N\"\n image.gps_longitude = (81.0, 41.0, 39.84)\n image.gps_longitude_ref = \"W\"\n image.gps_altitude = 199.034\n image.gps_altitude_ref = GpsAltitudeRef.ABOVE_SEA_LEVEL\n image.make = \"Acme Scanner Company\"\n image.model = \"Scan-o-Matic 5000\"\n image.datetime_original = \"1999:12:31 23:49:12\"\n image.datetime_digitized = \"2020:07:11 10:11:37\"\n image.brightness_value = 10.9876 # provides coverage for SRATIONAL\n image.user_comment = \"This image was scanned in from an old photo 
album.\" # provides coverage for user comment\n\n assert image.has_exif\n assert image.gps_latitude == (41.0, 29.0, 57.48)\n assert image.gps_latitude_ref == \"N\"\n assert image.gps_longitude == (81.0, 41.0, 39.84)\n assert image.gps_longitude_ref == \"W\"\n assert image.gps_altitude == 199.034\n assert image.gps_altitude_ref == GpsAltitudeRef.ABOVE_SEA_LEVEL\n assert image.make == \"Acme Scanner Company\"\n assert image.model == \"Scan-o-Matic 5000\"\n assert image.datetime_original == \"1999:12:31 23:49:12\"\n assert image.datetime_digitized == \"2020:07:11 10:11:37\"\n assert image.brightness_value == 10.9876 # provides coverage for SRATIONAL\n assert (\n image.user_comment == \"This image was scanned in from an old photo album.\"\n ) # provides coverage for user comment\n\n segment_hex = (\n binascii.hexlify(image._segments[\"APP1\"].get_segment_bytes())\n .decode(\"utf-8\")\n .upper()\n )\n assert \"\\n\".join(textwrap.wrap(segment_hex, 90)) == ADD_TO_SCANNED_IMAGE_BASELINE", "async def test_get_image(opp, utcnow):\n helper = await setup_test_component(opp, create_camera)\n image = await camera.async_get_image(opp, helper.entity_id)\n assert image.content == base64.b64decode(FAKE_CAMERA_IMAGE)", "def getimgs():", "def check_sub_image(self, ndvi_filename, input_path):\n rgb_filename = re.sub(\"BWNDVI\",\"RGB\",ndvi_filename)\n rgb_img = Image.open(self.get_file(os.path.join(input_path, rgb_filename),\n self.input_location_type))\n img_ok = check_image_ok(rgb_img, 0.05)\n return img_ok", "def verify(image_path, database, model):\n\n ### START CODE HERE ###\n\n # Step 1: Compute the encoding for the image. Use img_to_encoding() see example above. (≈ 1 line)\n status, encoding = img_to_encoding(image_path, model, resize=True)\n if not status:\n return None, None, encoding\n\n dist = 0\n\n # Step 2: Compute distance with identity's image (≈ 1 line)\n for (name, db_enc) in database.items():\n\n dist += np.linalg.norm(db_enc - encoding)\n\n final_dist = dist / len(database)\n\n # Step 3: Open the door if dist < 0.7, else don't open (≈ 3 lines)\n if final_dist < 0.7:\n print(\"welcome home!\")\n match = True\n else:\n print(\"please go away\")\n match = False\n\n ### END CODE HERE ###\n\n return final_dist, match, encoding", "def assertImageResponseGithub(self, package_name):\n BadgeTestCase._assertImageResponseGithub(\n self, package_name, main.BadgeStatus.SELF_INCOMPATIBLE)", "def assertWarp(self):\n if self.rect.size.height != 256 or self.rect.width != 128:\n raise ValueError(\"Bad image size for body warped image\")\n if self.format != self.format.R8G8B8:\n raise ValueError(\"Bad image format for warped image, must be R8G8B8\")", "def is_image(content_type):\n return content_type == \"image/jpeg\" or content_type == \"image/png\"", "def test_if_carousel_displays_a_valid_image(self):\n # She admires the big logo of the 'FeaturedApp' which is\n # currently displayed.\n carousel = self.browser.find_element_by_id(\"carousel-featured\")\n carousel_html = carousel.value_of_css_property(\"background\")\n featured_app = self.browser.find_element_by_id(\"featured_app_href\")\n featured_app.get_attribute(\"href\").split(\"/apps/\")[1]\n carousel_image = carousel_html.split('/', 1)[1]\n carousel_image = \"/\" + carousel_image\n carousel_path = carousel_image.split('\") no-repeat')[0][1:]\n project_path = Path(__file__).ancestor(2)\n fedorasoftware_app_path = project_path.child(\"fedora_software\")\n carousel_file = os.path.join(\n fedorasoftware_app_path,\n carousel_path\n )\n 
self.assertTrue(os.path.exists(carousel_file))", "def get_testing_image(mode, value = None, with_truth = False):\n if mode and value is None:\n # In this case, an image path has been passed.\n if not os.path.exists(mode):\n raise FileNotFoundError(\"You have only provided an argument for `mode`, this case expects \"\n \"an image filepath. Try passing a different, valid image path.\")\n else:\n # Return the processed image.\n return preprocess_image(mode)\n else:\n # Choose the dataset.\n if mode not in [\"train\", \"val\", \"test\", \"eval\"]:\n raise ValueError(f\"Received invalid dataset mode '{mode}', expecting train, val, test, or eval.\")\n # Construct the dataset.\n dataset = AgricultureVisionDataset()\n dataset.construct()\n\n # Get the corresponding dataset.\n if mode == \"eval\":\n iterator = dataset.evaluation_dataset()\n else:\n iterator = getattr(dataset, f\"{mode}_data\")\n\n # Iterate over the data.\n for indx, item in enumerate(iterator):\n # If the index is equal to the value.\n if indx == value:\n if hasattr(item, \"numpy\"):\n # Item is just a pure piece of image data (from a test set).\n return np.expand_dims(item.numpy()[0], axis = 0)\n elif len(item) == 2:\n # Item is a train/label data (from train/val).\n if with_truth:\n # If we want the image as well as the label.\n return np.expand_dims(item[0].numpy()[0], axis = 0), \\\n np.expand_dims(item[1].numpy()[0], axis = 0)\n else:\n return np.expand_dims(item[0].numpy()[0], axis = 0)\n else:\n # Any other case which has not already been covered.\n return np.expand_dims(item[0].numpy(), axis = 0)\n else:\n continue\n\n # For some reason, if nothing has been returned, then throw an error.\n raise Exception(\"Nothing was returned, something must be broken.\")", "def test_getImages(self): # GIVEN the group chat has at least one image\n testBot = bot.Bot(os.environ['bot_id'], os.environ['token'], os.environ['group_ID'])\n imageList = testBot.run() #AND THEN post_images calls the private get_images method which returns an array\n self.assertTrue(len(imageList) > 0) #THEN there should be at least one element in the array", "def test_image_mock_produces_expected_shape(self):\n\n with TemporaryDirectory() as tmp_dir:\n\n cases = [\n {\n \"x_dim\": 8,\n \"y_dim\": 8,\n \"num_channels\": 3,\n \"output_path\": \"/foo\",\n \"write_image\": True\n }\n ]\n\n for cid, case in enumerate(cases):\n output_path = os.path.join(tmp_dir, \"dummy%s.jpg\" % cid)\n img = mock_raw_image(x_dim=case[\"x_dim\"],\n y_dim=case[\"y_dim\"],\n num_channels=case[\"num_channels\"],\n output_path=output_path,\n write_image=case[\"write_image\"])\n\n self.assertEqual(img.shape, (case[\"x_dim\"], case[\"y_dim\"],\n case[\"num_channels\"]))\n if case[\"write_image\"]:\n self.assertTrue(tf.gfile.Exists(output_path))", "def _assertImageResponse(\n self, package_name, expected_status, expected_left_text):\n json_response = self.get_image_json(package_name)\n self.assertEqual(json_response['left_text'], expected_left_text)\n self.assertEqual(json_response['right_text'], expected_status.value)\n self.assertEqual(json_response['right_color'],\n main.BADGE_STATUS_TO_COLOR.get(expected_status))\n self.assertLinkUrl(package_name, json_response['whole_link'])", "def assert_img_equal(request):\n\n testname = request.node.name\n filename = Path(request.module.__file__)\n test_dir = filename.parent / filename.stem\n test_dir.mkdir(exist_ok=True)\n\n def _img_equal(img, index=0):\n expected_file = test_dir / f\"{testname}_{index}.png\"\n actual_file = test_dir / 
f\"{testname}_{index}_actual.png\"\n if img.ndim == 2:\n cv2.imwrite(str(actual_file), img)\n else:\n img_bgr = img.copy()\n img_bgr[..., :3] = img_bgr[..., :3][..., ::-1]\n cv2.imwrite(str(actual_file), img_bgr) # img is RGB, imwrite expects BGR\n\n if not expected_file.exists():\n raise AssertionError(\n f\"{expected_file} does not exist! Check newly produced img with a command like:\\n\\n feh {actual_file}\\n\\n\"\n )\n\n try:\n pytest.helpers.assert_img_equal(expected_file, img)\n except Exception as e:\n raise AssertionError(f\"{expected_file} differs from {actual_file}\") from e\n\n return _img_equal", "def has_image(self):\n return hasattr(self, \"_image\") and self._image is not None", "def check_images():\n saved_stdout, saved_stderr = sys.stdout, sys.stderr\n\n out, err = StringIO(), StringIO()\n try:\n sys.stdout, sys.stderr = out, err\n check_images_main()\n except SystemExit:\n pass\n finally:\n stdout, stderr = out.getvalue().strip(), err.getvalue().strip()\n sys.stdout, sys.stderr = saved_stdout, saved_stderr\n\n return stdout, stderr", "def has_picture(self):\n try:\n first = self.picture_planets()[0]\n except IndexError:\n first = None\n\n return first is not None", "def test_get_image_url(self):\r\n course = CourseFactory.create(org='edX', course='999')\r\n self.assertEquals(course_image_url(course), '/c4x/edX/999/asset/{0}'.format(course.course_image))", "def assertImageResponseGithub(self, package_name):\n BadgeTestCase._assertImageResponseGithub(\n self, package_name, main.BadgeStatus.SUCCESS)", "def is_image(pos, image, start_pos, dim_square):\n # Grab image on real board\n im = region_grabber((start_pos[0] + pos[1] * dim_square[0],\n start_pos[1] - (pos[0] + 1.0) * dim_square[1],\n start_pos[0] + (pos[1] + 1.0) * dim_square[0],\n start_pos[1] - pos[0] * dim_square[1]))\n\n pos_image = imagesearcharea(image, 0, 0, 0, 0, 0.9, im)\n return pos_image != [-1, -1]", "def check_image(self, filename):\n error = None\n file_type = None\n\n abs_path = os.path.expanduser(os.path.expandvars(filename))\n\n if not os.path.isabs(abs_path):\n try:\n if is_qt_designer():\n p = self.get_designer_window()\n if p is not None:\n ui_dir = p.absoluteDir().absolutePath()\n abs_path = os.path.join(ui_dir, abs_path)\n else:\n parent_display = self.widget.find_parent_display()\n base_path = None\n if parent_display:\n base_path = os.path.dirname(\n parent_display.loaded_file())\n abs_path = find_file(abs_path, base_path=base_path)\n except Exception as ex:\n print(\"Exception: \", ex)\n error = \"Unable to find full filepath for {}\".format(filename)\n abs_path = filename\n # First, lets try SVG. 
We have to try SVG first, otherwise\n # QPixmap will happily load the SVG and turn it into a raster image.\n # Really annoying: We have to try to load the file as SVG,\n # and we expect it will fail often (because many images aren't SVG).\n # Qt prints a warning message to stdout any time SVG loading fails.\n # So we have to temporarily silence Qt warning messages here.\n qInstallMessageHandler(self.qt_message_handler)\n svg = QSvgRenderer()\n if svg.load(abs_path):\n file_type = svg\n qInstallMessageHandler(None)\n return error, file_type\n qInstallMessageHandler(None)\n # SVG didn't work, lets try QPixmap\n image = QPixmap(abs_path)\n if not image.isNull():\n file_type = image\n return error, file_type\n # If we get this far, the file specified could not be loaded at all.\n if error is None:\n error = \"Could not load image \\n{}\".format(filename)\n return error, file_type", "def hasImage(self):\n return self._image is not None", "def check_image(image, psf_criteria=6):\n \n print('checking image: {}'.format(image))\n tab = pd.DataFrame(np.array(Table.read(image, format='fits')))\n # check the mean of FWHM < psf_criteria for good image\n if tab['FWHM_IMAGE'].mean() < psf_criteria:\n return 1\n else:\n return 0", "def load_from_images(self):\n logging.debug(\"load_from_images called\")\n return True", "def test_images(self):\n\n message = {\"method\": \"images\", \"params\": {\"elem\": None}}\n response = yield self._get_response(message)\n\n self.assertIsInstance(response, dict)\n self.assertEqual(response[\"method\"], \"images\")\n self.assertIsInstance(response[\"result\"], list)\n\n images = [i[\"tag\"] for i in response[\"result\"]]\n\n self.assertIn(self.tag_image, images)", "def __diff_image(self):\n img = cv2.imread(self.imagefile()).copy()\n Reference.__draw_bugs(img, self.__true_positives, False, 1)\n Reference.__draw_bugs(img, self.__false_negatives, (0, 255, 0))\n Reference.__draw_bugs(img, self.__false_positives, (0, 0, 255))\n return img", "def test_get_image_url(self):\r\n course = CourseFactory.create(org='edX', course='999')\r\n url = utils.course_image_url(course)\r\n self.assertEquals(url, '/c4x/edX/999/asset/{0}'.format(course.course_image))", "def assertImageResponseGithub(self, package_name):\n BadgeTestCase._assertImageResponseGithub(\n self, package_name, main.BadgeStatus.PAIR_INCOMPATIBLE)", "def verifyImage(img_url):\n img_url = request.form['img_url']\n prefix = img_url.startswith(('http:', 'https:'))\n suffix = img_url.endswith(('.jpg', '.png', '.gif'))\n if (not prefix) or (not suffix):\n return False\n try:\n urllib2.urlopen(img_url)\n return True\n except (ValueError, urllib2.HTTPError, urllib2.URLError):\n return False", "def test_class_image(self):\n fwa = FakeWikiArchivo(\n 'abcd <a href=\"/wiki/foobar\" class=\"image\">FooBar</a> dcba'\n )\n _, r = self.peishranc(fwa)\n self.assertEqual(r, [])", "def check_image(self, tag):\n image_name = self.build_image_name(tag)\n try:\n self.client.images.get_registry_data(image_name)\n return True\n except Exception as ex:\n print('Image {} does not exist: '.format(image_name), str(ex))\n return False", "def test_aws_service_api_private_image_get(self):\n pass", "def test_RGB_mode():\n\n model = Instafilter(\"Lo-Fi\")\n\n f_image = __local__ / \"Normal.jpg\"\n\n img1 = model(f_image)\n img2 = model(f_image, is_RGB=True)\n\n diff = (img1 - img2).sum()\n\n assert abs(diff) > 0", "def test_save_image(self):\n self.roses.save_image()\n image = Images.objects.all()\n self.assertEqual(len(image), 1)", "def 
verify(image_path, identity, database, model):\r\n \r\n \r\n encoding = img_to_encoding(image_path=image_path, model=model)\r\n \r\n dist = np.linalg.norm(np.subtract(database[identity], encoding))\r\n \r\n if dist<0.7:\r\n print(\"It's \" + str(identity) + \", welcome home!\")\r\n door_open = True\r\n else:\r\n print(\"It's not \" + str(identity) + \", please go away\")\r\n door_open = False\r\n \r\n \r\n return dist, door_open" ]
[ "0.66298425", "0.6477675", "0.64134514", "0.640309", "0.63916004", "0.6377701", "0.6357215", "0.633534", "0.63193035", "0.63193035", "0.6305505", "0.62537336", "0.6252625", "0.62456065", "0.6232906", "0.6224744", "0.6224744", "0.62182146", "0.6191985", "0.6188353", "0.61810327", "0.61798644", "0.6144117", "0.6109243", "0.6070198", "0.6062834", "0.6029354", "0.602734", "0.60262406", "0.60189617", "0.60124856", "0.5984564", "0.5958125", "0.59387445", "0.5930844", "0.5929822", "0.5925974", "0.59188795", "0.591653", "0.59031403", "0.58914477", "0.58847696", "0.5883258", "0.588036", "0.58777785", "0.5870035", "0.585326", "0.58490855", "0.5846921", "0.5842419", "0.5838088", "0.5822352", "0.5819578", "0.58028316", "0.57883984", "0.5784654", "0.57839334", "0.57803935", "0.57785445", "0.5771024", "0.5770275", "0.57410043", "0.5738265", "0.57321894", "0.57299536", "0.5716863", "0.5702489", "0.5700199", "0.56997156", "0.56970674", "0.569479", "0.56941867", "0.5691259", "0.5689193", "0.5679759", "0.56768996", "0.5672381", "0.5670689", "0.56697416", "0.56694835", "0.5668589", "0.565489", "0.5654753", "0.564836", "0.5646827", "0.56441057", "0.5642562", "0.5640354", "0.5636975", "0.5632146", "0.5631248", "0.562947", "0.56279653", "0.5623714", "0.5623403", "0.5619091", "0.5610871", "0.56104785", "0.56074625", "0.55989784", "0.5593156" ]
0.0
-1
Test the gallows method handles out of bounds indices
def test_gallows_outside_bounds(self): with mock.patch("hangman.cli.screen.print") as mock_print: for index in [-1, len(hangman.cli.screen._GALLOWS)]: hangman.cli.screen.Screen.gallows(index) mock_print.assert_called_with(hangman.cli.screen._GALLOWS[-1])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_out_of_bounds_calls(self):\n with self.assertRaises(IndexError):\n self.gameBoard.getGridItem(101,101)", "def _validate_indexes(self, row, col):\n if min(row, col) < 0 or max(row, col) >= self._n:\n raise IndexError(\n \"Incorrect position (%d, %d) in grid of size %d\" % (\n row, col, self._n\n )\n )", "def check_bounds(self, index):\n if index < self.lower_bound or index > self.upper_bound:\n return False\n return True", "def _inrange(self, index):\n if len(index) != self.ndim:\n raise Exception('SparseN tensor has %d dimensions, and requires the same number of indices.'%self.ndim)\n for ii, ss in zip(index,self.shape):\n if ii < 0 or ii >= ss:\n raise Exception('Index is out of range: %d'%index)", "def isoutofbounds(indices, dims):\n indices = np.asarray(indices)\n dims = np.asarray(dims)\n z = np.zeros_like(dims)\n return np.any(np.logical_or(indices < z, indices >= dims), -1)", "def test_out_of_bounds(self) -> None:\n\n self.assertIsInstance(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10)), np.ndarray)\n self.pop.persons[:,idx.speed] = 1\n self.pop.persons[:,idx.x_axis] = 1.1\n self.pop.persons[:,idx.y_axis] = 1.1\n self.pop.persons[:,idx.x_dir] = 0.5\n self.pop.persons[:,idx.y_dir] = 0.5\n\n self.assertLess(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)\n self.assertLess(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)\n\n self.pop.persons[:,idx.x_axis] = -0.1\n self.pop.persons[:,idx.y_axis] = -0.1\n self.pop.persons[:,idx.x_dir] = -0.5\n self.pop.persons[:,idx.y_dir] = -0.5\n self.assertGreater(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)\n self.assertGreater(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)", "def check_index(index, array):\n if index.bottom != 1:\n pass\n #Only access arrays with whole indices!\n elif index.top >= len(array):\n pass\n #Array out of bounds error!\n elif index.sign == -1:\n pass\n #Indexes can't be negative!\n\n return index.top", "def test_step_out_of_bounds_indices(self):\n _, data_directory = self._collect_episode_data(\n num_episodes=6, max_episodes_per_file=3)\n with riegeli_backend_reader.RiegeliBackendReader(\n data_directory) as data_reader:\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n len(data_reader.steps))\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n -len(data_reader.steps) - 1)", "def test_contains_bounds(self):\n dim = Dimension(\"yolo\", \"uniform\", -3, 4)\n with pytest.raises(NotImplementedError):\n assert -3 in dim", "def __verify_index(self, index):\n if not isinstance(index, int):\n raise TypeError(\"Index must be of type int\")\n elif index >= self.length or index < -self.length:\n raise IndexError(\"Index out of bounds\")\n return True", "def is_valid_index(x, y, l_matrix):\n return x < l_matrix and y < l_matrix and x > -1 and y > -1", "def test_slice_negative_index_error(self):\n self.assertRaises(IndexError, lambda: self.table[-1])", "def test_raise_exception_bad_indices(self):\n print(\"Testing exception is raised if indices are bad\")\n\n with self.assertRaises(Exception) as no_index:\n get_region_data(self.wmo_boxes, self.float_name, self.config,\n [], self.pres)\n\n self.assertTrue('NO DATA 
FOUND' in str(no_index.exception))\n\n with self.assertRaises(Exception) as big_index:\n get_region_data(self.wmo_boxes, self.float_name, self.config,\n [99999999999999999], self.pres)\n\n self.assertTrue('NO DATA FOUND' in str(big_index.exception))", "def testOffsetsOutOfBoundsDetection(self):\n sim = Simulation()\n sim.set_simulation_parameters(\n seed=11,\n task=36,\n output_directory=\"output\",\n min_speciation_rate=0.5,\n sigma=2,\n tau=2,\n deme=1,\n sample_size=0.01,\n max_time=100,\n )\n sim.set_map_files(sample_file=\"null\", fine_file=\"sample/SA_sample.tif\")\n sim.optimise_ram(ram_limit=10000)\n self.assertEqual(sim.fine_map.x_size, sim.sample_map.x_size)\n self.assertEqual(sim.fine_map.y_size, sim.sample_map.y_size)\n self.assertEqual(0, sim.sample_map.x_offset)\n self.assertEqual(0, sim.sample_map.y_offset)\n self.assertEqual(sim.fine_map.x_size, sim.grid.x_size)\n self.assertEqual(sim.fine_map.y_size, sim.grid.y_size)\n self.assertEqual(\"null\", sim.grid.file_name)\n sim.run()", "def out_of_bounds(self):\n return self._parms.get(\"out_of_bounds\")", "def out_of_bounds(self):\n return self.rect.right <= 0", "def test_bounds_respected_func_not_called(\n self, check_bounds_respected):\n self.controller.problem.value_ranges = {'test': (0, 1)}\n self.controller.minimizer = \"deriv_free_algorithm\"\n self.controller.flag_expected = [3]\n\n _ = loop_over_hessians(self.controller,\n options=self.options,\n grabbed_output=self.grabbed_output,\n checkpointer=self.cp)\n check_bounds_respected.assert_not_called()", "def test_bad_bounds(self):\n with pytest.raises(ValueError):\n Real(\"yolo\", \"norm\", 0, 2, low=+2, high=-2, shape=(4, 4))\n with pytest.raises(ValueError):\n Real(\"yolo\", \"norm\", 0, 2, low=+2, high=+2, shape=(4, 4))", "def test_step_out_of_bounds_indices(self):\n _, backend = _collect_episode_data(num_episodes=6)\n data_reader = in_memory_backend.InMemoryBackendReader(backend)\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n len(data_reader.steps))\n self.assertRaises(IndexError, operator.getitem, data_reader.steps,\n -len(data_reader.steps) - 1)", "def test_column_index_out_of_bounds(self):\n c = Column('foo', range(3))\n with self.assertRaises(IndexError):\n c[4]", "def test_maxIndex(self):\t\t\n self.assertEqual(attempt.maxIndexZ, 113)\n self.assertEqual(attempt.maxIndexW, 134)", "def occupied(self, (xIndex, yIndex)):\n return xIndex < 0 or yIndex < 0 or \\\n xIndex >= self.xN or yIndex >= self.yN or \\\n self.grid[xIndex][yIndex]", "def test_wrong_number_of_bounds(self):\n emsg = \"should have only an upper and lower limit\"\n with self.assertRaisesRegex(TypeError, emsg):\n _make_mask_cube(self.mask, self.coords, [0], self.units)\n with self.assertRaisesRegex(TypeError, emsg):\n _make_mask_cube(self.mask, self.coords, [0, 2, 4], self.units)", "def tile_is_out_of_borders(index, shape):\n return index[0] < 0 or index[1] < 0 or index[0] >= shape[0] or index[1] >= shape[1]", "def check_bounds(f):\n\n @wraps(f)\n def deco(chunk, coords, *args, **kwargs):\n x, y, z = coords\n\n # Coordinates were out-of-bounds; warn and run away.\n if not (0 <= x < 16 and 0 <= z < 16 and 0 <= y < CHUNK_HEIGHT):\n warn(\"Coordinates %s are OOB in %s() of %s, ignoring call\"\n % (coords, f.func_name, chunk), ChunkWarning, stacklevel=2)\n # A concession towards where this decorator will be used. 
The\n # value is likely to be discarded either way, but if the value is\n # used, we shouldn't horribly die because of None/0 mismatch.\n return 0\n\n return f(chunk, coords, *args, **kwargs)\n\n return deco", "def _inside_op_range(self, idx):\n\n if idx < self._parameters.op_range[0]:\n return False\n return (self._parameters.op_range[1] < 0 or\n idx <= self._parameters.op_range[1])", "def test_invalid(self):\n a = np.ones((10, 10))\n ai = np.ones((10, 2), dtype=np.intp)\n\n # sanity check\n take_along_axis(a, ai, axis=1)\n\n # not enough indices\n assert_raises(ValueError, take_along_axis, a, np.array(1), axis=1)\n # bool arrays not allowed\n assert_raises(IndexError, take_along_axis, a, ai.astype(bool), axis=1)\n # float arrays not allowed\n assert_raises(IndexError, take_along_axis, a, ai.astype(float), axis=1)\n # invalid axis\n assert_raises(AxisError, take_along_axis, a, ai, axis=10)", "def test_coords_out_of_range(self):\n layout = Layout()\n with self.assertRaises(AssertionError):\n led = layout.ledAt(-1, 0)\n with self.assertRaises(AssertionError):\n led = layout.ledAt(0, -1)\n with self.assertRaises(AssertionError):\n led = layout.ledAt(layout.width, 0)\n with self.assertRaises(AssertionError):\n led = layout.ledAt(0, layout.height)", "def test_out_of_bounds(oob_from, oob_to):\n with pytest.raises(ValueError):\n haversine_vector([oob_from], [oob_to])\n with pytest.raises(ValueError):\n haversine_vector([oob_from], [oob_to], normalize=False)", "def handle_negative_index(index, bound):\n try:\n if not less_equal(0, index):\n if is_literal(index) and index <= -self.int_max_:\n # this case is handled separately\n return index\n return bound + index\n except TypeError:\n logger.warning(f\"Cannot determine if {index} < 0\")\n return index", "def _add_out_of_bounds(self):\n cell = OutOfBoundsCell(self)\n for x in range(self._columns):\n self.cell_at(x, 0, cell)", "def test_bit_not_offset_out_of_range(self):\n ops = [bitwise_operations.bit_not(self.five_255_bin, 41, 8, None)]\n\n with pytest.raises(e.OpNotApplicable):\n self.as_connection.operate(self.test_key, ops)", "def test_get_quadrant_with_negative_index(self):\n self.assertRaises(ValueError, self.sudoku.get_quadrant, -1)", "def test_bad_region():\n ref_file = pkg_resources.resource_filename('m260b.test_data', 'ref_practice_W_1_chr_1.fasta')\n read_file = pkg_resources.resource_filename('m260b.test_data', 'practice_w_1.std.bad_region1.bam')\n ref_hdr, reference = read_basic_fasta(ref_file) \n read_iter = pysam.Samfile(read_file)\n chr = ref_hdr[1:].strip()\n areg = list(active_regions(read_iter, reference, chr, start_offset=0, flank=30, dfrac=1.0))\n found = False\n for region, reads in areg:\n found |= region.start <= 5769 <= region.stop\n if not found:\n raise ValueError('Window did not open around variant')", "def IsBound(self) -> bool:", "def test_gallows_within_bounds(self):\n with mock.patch(\"hangman.cli.screen.print\") as mock_print:\n for index in range(len(hangman.cli.screen._GALLOWS)):\n hangman.cli.screen.Screen.gallows(index)\n mock_print.assert_called_with(hangman.cli.screen._GALLOWS[index])", "def _test_out_of_range(self):\n self.cdbconf.setup('KKG')\n self.cdbconf.setConfiguration('CUSTOM_OPT')\n az, el, latitude = [radians(50)] * 3\n site_info = {'latitude': latitude}\n self.p.setup(site_info, self.source, self.device)\n self.p.setRewindingMode('AUTO')\n offset = 20\n max_limit = self.device.getMaxLimit() \n min_limit = self.device.getMinLimit()\n Pis = max_limit - offset/2\n time.sleep(0.2) if self.using_mock 
else time.sleep(3)\n self.p.setPosition(Pis)\n time.sleep(0.2) # Wait a bit for the setup\n max_rewinding_steps = (max_limit - min_limit) // self.device.getStep()\n expected = Pis - max_rewinding_steps*self.device.getStep() + offset\n self.source.setAzimuth(az)\n self.source.setElevation(el)\n self.p.startUpdating('MNG_TRACK', 'ANT_NORTH', az, el, None, None)\n time.sleep(0.2) if self.using_mock else time.sleep(3)\n self.p.setOffset(offset)\n time.sleep(0.2) if self.using_mock else time.sleep(3)\n self.assertEqual(self.device.getActPosition(), expected)", "def check(self):\n self.lower_bound(5e-4)\n self.upper_bound(5e2)", "def test_get_quadrant_with_too_large_index(self):\n self.assertRaises(ValueError, self.sudoku.get_quadrant, 9)", "def check_limits(self):\n\n #Find the relative position of each leg vs. its \"zero\" position\n relpos = self.fixed_plate - self.fixed_plate_zero\n\n for leg in range(3):\n #Check that the leg is within allowable \"safe zone\"\n #Use the position of the leg (relative to 0) to find the index in the \"safe zone\" matrix\n i_x = nearest_index(self.leg_safe_xaxis, relpos[COORD_X, leg])\n i_z = nearest_index(self.leg_safe_zaxis, relpos[COORD_Z, leg])\n #Look up in the safe zone.\n self.leg_fault[leg] = (not self.leg_safe_zone[leg, i_x, i_z])\n\n if (not all(np.isreal(self.fixed_plate[:, leg]))) or any(np.isnan(self.fixed_plate[:, leg])):\n #A complex or NaN value = the angle found for the leg was invalid, meaning that the\n #leg would have to be longer to reach the desired position.\n self.leg_fault[leg] = True", "def _in_bounds(self, x, y):\r\n return 0 <= x < 8 and 0 <= y < 8", "def out_of_bounds(self):\n return not 0 <= self.nodes[0].x < WIDTH * SCALE or not 0 <= self.nodes[0].y < HEIGHT * SCALE", "def test_episode_step_out_of_bounds_indices(self):\n _, data_directory = self._collect_episode_data(\n num_episodes=6, max_episodes_per_file=3)\n with riegeli_backend_reader.RiegeliBackendReader(\n data_directory) as data_reader:\n for episode in data_reader.episodes:\n self.assertRaises(IndexError, operator.getitem, episode, len(episode))\n self.assertRaises(IndexError, operator.getitem, episode,\n -len(episode) - 1)", "def _get_valid_index(lons_side1, lons_side2, lons_side3, lons_side4,\n lats_side1, lats_side2, lats_side3, lats_side4,\n lons, lats, radius_of_influence):\n\n # Coarse reduction of data based on extrema analysis of the boundary\n # lon lat values of the target grid\n illegal_lons = (((lons_side1 < -180) | (lons_side1 > 180)).any() or\n ((lons_side2 < -180) | (lons_side2 > 180)).any() or\n ((lons_side3 < -180) | (lons_side3 > 180)).any() or\n ((lons_side4 < -180) | (lons_side4 > 180)).any())\n\n illegal_lats = (((lats_side1 < -90) | (lats_side1 > 90)).any() or\n ((lats_side2 < -90) | (lats_side2 > 90)).any() or\n ((lats_side3 < -90) | (lats_side3 > 90)).any() or\n ((lats_side4 < -90) | (lats_side4 > 90)).any())\n\n if illegal_lons or illegal_lats:\n # Grid boundaries are not safe to operate on\n return np.ones(lons.size, dtype=np.bool)\n\n # Find sum angle sum of grid boundary\n angle_sum = 0\n for side in (lons_side1, lons_side2, lons_side3, lons_side4):\n prev = None\n side_sum = 0\n for lon in side:\n if prev:\n delta = lon - prev\n if abs(delta) > 180:\n delta = (abs(delta) - 360) * (delta // abs(delta))\n angle_sum += delta\n side_sum += delta\n prev = lon\n\n # Buffer min and max lon and lat of interest with radius of interest\n lat_min = min(lats_side1.min(), lats_side2.min(), lats_side3.min(),\n lats_side4.min())\n lat_min_buffered = lat_min 
- float(radius_of_influence) / R\n lat_max = max(lats_side1.max(), lats_side2.max(), lats_side3.max(),\n lats_side4.max())\n lat_max_buffered = lat_max + float(radius_of_influence) / R\n\n max_angle_s2 = max(abs(lats_side2.max()), abs(lats_side2.min()))\n max_angle_s4 = max(abs(lats_side4.max()), abs(lats_side4.min()))\n lon_min_buffered = (lons_side4.min() -\n float(radius_of_influence) /\n (np.sin(np.radians(max_angle_s4)) * R))\n\n lon_max_buffered = (lons_side2.max() +\n float(radius_of_influence) /\n (np.sin(np.radians(max_angle_s2)) * R))\n\n # From the winding number theorem follows:\n # angle_sum possiblilities:\n # -360: area covers north pole\n # 360: area covers south pole\n # 0: area covers no poles\n # else: area covers both poles\n if round(angle_sum) == -360:\n # Covers NP\n valid_index = (lats >= lat_min_buffered)\n elif round(angle_sum) == 360:\n # Covers SP\n valid_index = (lats <= lat_max_buffered)\n elif round(angle_sum) == 0:\n # Covers no poles\n valid_lats = (lats >= lat_min_buffered) * (lats <= lat_max_buffered)\n\n if lons_side2.min() > lons_side4.max():\n # No date line crossing\n valid_lons = (lons >= lon_min_buffered) * \\\n (lons <= lon_max_buffered)\n else:\n # Date line crossing\n seg1 = (lons >= lon_min_buffered) * (lons <= 180)\n seg2 = (lons <= lon_max_buffered) * (lons >= -180)\n valid_lons = seg1 + seg2\n\n valid_index = valid_lats * valid_lons\n else:\n # Covers both poles don't reduce\n valid_index = np.ones(lons.size, dtype=np.bool)\n\n return valid_index", "def __check(self):\n if len(self._data)!=len(self._ptbins)+1: \n raise IndexError('Pt bins mismatch')\n for ptbin in self._data:\n if len(ptbin)!=len(self._etabins)+1:\n raise IndexError('Eta bins mismatch')", "def test_set_cell_by_indexes(self):\n self.dboard.set_cell_by_indexes(0, 0, \"00\")\n self.dboard.set_cell_by_indexes(0, 1, \"01\")\n self.dboard.set_cell_by_indexes(1, 0, \"10\")\n self.dboard.set_cell_by_indexes(1, 1, \"11\")\n\n with self.assertRaises(IndexError) as ctx:\n self.dboard.set_cell_by_indexes(40, 50, \"45\")\n\n self.assertEqual(\"list index out of range\", str(ctx.exception))", "def _catchindexerror(func):\n @wraps(func)\n def newfunc(*args, **kwargs):\n \"\"\" Catch the index error \"\"\"\n try:\n return func(*args, **kwargs)\n except IndexError as e:\n raise CharmmPSFError('Array is too short: %s' % e)\n\n return newfunc", "def test_bit_get_bit_offset_out_of_range(self):\n ops = [bitwise_operations.bit_get(self.test_bin_ones, 41, 1)]\n\n with pytest.raises(e.OpNotApplicable):\n self.as_connection.operate(self.test_key, ops)", "def test_bit_set_bit_index_out_of_range(self):\n value = bytearray()\n value.append(255)\n ops = [bitwise_operations.bit_set(self.test_bin_zeroes, 41, 8, 1, value, None)]\n with pytest.raises(e.OpNotApplicable):\n self.as_connection.operate(self.test_key, ops)", "def checkWithinBound(rowWithinBound,colWithinBound):\n if(rowWithinBound == 0 and colWithinBound == 0):\n return True\n else:\n return False", "def test_bit_get_int_bit_offset_out_of_range(self):\n ops = [bitwise_operations.bit_get_int(self.test_bin_ones, 41, 1, False)]\n\n with pytest.raises(e.OpNotApplicable):\n self.as_connection.operate(self.test_key, ops)", "def test_find_break_points_invalid_range(self):\r\n self.assertRaises(ValueError, self.mc._find_break_points, 1, 0, 5)\r\n self.assertRaises(ValueError, self.mc._find_break_points, 1, 1, 5)", "def _validate_index(self, index):\r\n\t\tvalid_index = index is int(index) and index >= 0 and index < self._size\r\n\t\tif not 
valid_index:\r\n\t\t\traise IndexError()", "def bounds(self, pos):", "def is_valid(array, index):\n row, column = index\n return 0 <= row < len(array) and 0 <= column < len(array[row])", "def test_grid_index(buttons):\n\n app = App(rows=2, columns=2)\n with pytest.raises(GridIndexError):\n app[-5] = buttons[0]\n\n app[-1] = buttons[0]\n\n with pytest.raises(GridIndexError):\n app[2] = buttons[0]\n\n app[1] = buttons[0]", "def _is_valid_index(self, index):\n row = index.row()\n column = index.column()\n return not (row < 0 or column < 0 or\n row >= len(self.view_list) or column > 4 or\n index == QModelIndex())", "def check_limits(*args):\n if getNanny(args[0]) == 1:\n if np.sum(pt_index(args[0])) != 0:\n setAbortState(args[0], 1)\n elif np.sum(tc_index(args[0])) != 0:\n setAbortState(args[0], 1)\n elif np.sum(lc_index(args[0])) != 0:\n setAbortState(args[0], 1)\n return args[0].Controls.AbortState.abort_state", "def test_rwg_open_segment(grid):\n space = bempp.api.function_space(grid, \"RWG\", 0, segments=[1])\n\n dofs_empty = True\n boundary_dofs_empty = True\n\n for elem_index in range(grid.number_of_elements):\n if space.support[elem_index]:\n if _np.any(\n space.support[\n grid.element_neighbors.indices[\n grid.element_neighbors.indexptr[\n elem_index\n ] : grid.element_neighbors.indexptr[elem_index + 1]\n ]\n ]\n is False\n ):\n # Element is on the boundary\n for index, edge_index in enumerate(grid.element_edges[:, elem_index]):\n neighbors = list(grid.edge_neighbors[edge_index])\n if _np.any(space.support[neighbors] is False):\n # Edge is on the boundary\n if space.local_multipliers[elem_index, index] != 0:\n boundary_dofs_empty = False\n else:\n if _np.any(space.local_multipliers[elem_index] != 0):\n dofs_empty = False\n\n assert dofs_empty\n assert boundary_dofs_empty", "def test_episode_step_out_of_bounds_indices(self):\n _, backend = _collect_episode_data(num_episodes=6)\n data_reader = in_memory_backend.InMemoryBackendReader(backend)\n for episode in data_reader.episodes:\n self.assertRaises(IndexError, operator.getitem, episode, len(episode))\n self.assertRaises(IndexError, operator.getitem, episode,\n -len(episode) - 1)", "def test_find_index_map_exception(self):\n nb_images = 100\n height_map = 32\n width_map = 24\n \n y_float32_0 = numpy.random.normal(loc=1.2,\n scale=0.5,\n size=(nb_images, height_map, width_map, 1)).astype(numpy.float32)\n y_float32_1 = numpy.random.laplace(loc=0.4,\n scale=1.,\n size=(nb_images, height_map, width_map, 1)).astype(numpy.float32)\n y_float32_2 = numpy.random.standard_cauchy(size=(nb_images, height_map, width_map, 1)).astype(numpy.float32)\n y_float32_3 = numpy.random.uniform(low=-10.,\n high=10.,\n size=(nb_images, height_map, width_map, 1)).astype(numpy.float32)\n y_float32_4 = numpy.random.normal(loc=-1.,\n scale=4.,\n size=(nb_images, height_map, width_map, 1)).astype(numpy.float32)\n y_float32 = numpy.concatenate((y_float32_0, y_float32_1, y_float32_2, y_float32_3, y_float32_4),\n axis=3)\n idx_map_exception = lossless.stats.find_index_map_exception(y_float32)\n print('Index of the exception: {}'.format(idx_map_exception))", "def test_contains_extra_bounds(self):\n dim = Real(\"yolo\", \"norm\", 0, 3, low=-3, high=+3)\n assert dists.uniform.rvs(-3, 3) in dim\n assert -4 not in dim\n assert +4 not in dim\n assert (1, 2) not in dim", "def check_collisions(self):", "def check_i(self):\n for i in xrange(1, len(self.I)):\n assert self.I[i]-self.I[i-1] >= self.m", "def test_slice_index_error(self):\n self.assertRaises(IndexError, lambda: 
self.table[0])", "def test_bit_count_bit_offset_out_of_range(self):\n ops = [bitwise_operations.bit_count(self.zero_one_bin, 81, 1)]\n\n with pytest.raises(e.OpNotApplicable):\n self.as_connection.operate(self.test_key, ops)", "def check_bounds (position, size):\n \n for item in position:\n # checks whether item is out of bounds\n if item < 0 or item >= size:\n return False\n return True", "def valid_index(self, index):\n if 0 <= index < self._list_size:\n return True\n else:\n return False", "def test_episode_out_of_bounds_indices(self):\n _, backend = _collect_episode_data(num_episodes=6)\n data_reader = in_memory_backend.InMemoryBackendReader(backend)\n self.assertRaises(IndexError, operator.getitem, data_reader.episodes,\n len(data_reader.episodes))\n self.assertRaises(IndexError, operator.getitem, data_reader.episodes,\n -len(data_reader.episodes) - 1)", "def test_upperbound_fails(self):\n emsg = \"should have both an upper and lower limit\"\n with self.assertRaisesRegex(TypeError, emsg):\n _make_mask_cube(self.mask, self.coords, [None, self.upper], self.units)", "def test_episode_out_of_bounds_indices(self):\n _, data_directory = self._collect_episode_data(\n num_episodes=6, max_episodes_per_file=3)\n with riegeli_backend_reader.RiegeliBackendReader(\n data_directory) as data_reader:\n self.assertRaises(IndexError, operator.getitem, data_reader.episodes,\n len(data_reader.episodes))\n self.assertRaises(IndexError, operator.getitem, data_reader.episodes,\n -len(data_reader.episodes) - 1)", "def test_get_random_indices_in_range(self):\n maze = Maze(10, 10)\n\n for test in range(1000):\n position = maze._Maze__get_random_indices()\n self.assertTrue(-1 < position[0] < 10)\n self.assertTrue(-1 < position[1] < 10)", "def validate_indexes(i, j, path, limit):\n while i == j or path[i] == path[j]:\n j = np.random.randint(0, limit)", "def in_range(table, index):\n if index > len(table):\n print(\"Error: index out of range\")\n return False\n if index < 0:\n print(\"Error: negative index\")\n return False\n return True", "def _check_index(idx):\n return isinstance(idx, _Int)", "def test_sample_from_extra_bounds_bad(self):\n dim = Real(\"yolo\", \"norm\", 0, 2, low=-2, high=+2, shape=(4, 4))\n with pytest.raises(ValueError) as exc:\n dim.sample(8)\n assert \"Improbable bounds\" in str(exc.value)", "def test_getitem_other(self):\n random.seed(12345)\n\n nside_coverage = 32\n nside_map = 128\n\n full_map = np.zeros(hpg.nside_to_npixel(nside_map)) + hpg.UNSEEN\n full_map[0: 5000] = random.random(size=5000)\n\n sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)\n\n indices = (1, 2, 3, 4)\n self.assertRaises(IndexError, sparse_map.__getitem__, indices)\n\n indices = 5.0\n self.assertRaises(IndexError, sparse_map.__getitem__, indices)", "def test_get_row_with_negative_index(self):\n self.assertRaises(ValueError, self.sudoku.get_row, -1)", "def check_masked (self, pos : list,) :\n count = 0\n total = 0\n for x in range(pos[0],min(pos[0] + AUTO_width1, self.m_x)) :\n for y in range(pos[1], min(pos[1] + AUTO_width1, self.m_y)) :\n total += 1\n if self.current_grid[x][y] :\n count += 1\n if count/total > 0.5 :\n return True\n else :\n return False", "def test_patch_grid_has_positive_dimension(self):\n with self.assertRaises(AssertionError):\n PatchGrid((), (), ())", "def test_rwg_closed_segment(grid):\n space = bempp.api.function_space(\n grid, \"RWG\", 0, segments=[1], include_boundary_dofs=True\n )\n\n for elem_index in range(grid.number_of_elements):\n if 
space.support[elem_index]:\n assert _np.all(space.local_multipliers[elem_index] != 0)\n else:\n assert _np.all(space.local_multipliers[elem_index] == 0)", "def assert_bounds(self, pos):\n row, col = pos\n\n if not (row in range(self.BOARD_SIZE) and\n col in range(self.BOARD_SIZE)):\n raise IndexError(\"Cannot place a worker out of board bounds\")", "def victory_checker() -> bool:\r\n conflict_check()\r\n for x in range(shape):\r\n for y in range(shape):\r\n if conflict_space[x, y] != 0:\r\n return False\r\n if separation_crawler(False):\r\n return False\r\n return True", "def test_get_row_with_too_large_index(self):\n self.assertRaises(ValueError, self.sudoku.get_row, 9)", "def check_allowed_positions(scan, psi, probe_shape):\n int_scan = scan // 1\n less_than_one = int_scan < 1\n greater_than_psi = np.stack(\n (int_scan[..., -2] >= psi.shape[-2] - probe_shape[-2],\n int_scan[..., -1] >= psi.shape[-1] - probe_shape[-1]),\n -1,\n )\n if np.any(less_than_one) or np.any(greater_than_psi):\n x = np.logical_or(less_than_one, greater_than_psi)\n raise ValueError(\"These scan positions exist outside field of view:\\n\"\n f\"{scan[np.logical_or(x[..., 0], x[..., 1])]}\")", "def test_creation_incorrect_hardbounds_count():\n with pytest.raises(ValueError) as __:\n value = 1\n __ = param.Integer(value=value, hardbounds=[0, 10, 20])", "def no_collisions(data, affected_points):\n return", "def count_oob(cube):\n out_range = (cube > 4).any(1) | (cube < -4).any(1)\n out_range = out_range.sum() / cube.shape[0]\n return out_range", "def getValidIndicies(self, points):\n try:\n inds = [ i for i in range(points.size) if points.flat[i] in self.maskSet ]\n return inds\n except Exception as error:\n print(\"failed in getValidIndicies\", error)\n return -1", "def test_get_column_with_negative_index(self):\n self.assertRaises(ValueError, self.sudoku.get_column, -1)", "def test_griewank(self):\n fun = get_problem('griewank', self.dimension)\n self.assertEqual(fun(self.array), 0.0)", "def test_get_column_with_too_large_index(self):\n self.assertRaises(ValueError, self.sudoku.get_column, 9)", "def test_adjacent_bomb_count(self):\n index = 0\n adj_list = utils.adjacent_bomb_count(index)\n adj_list_2 = [\n index + x\n for x in utils.LEFT_ADJ_LIST\n if 0 <= index + x <= (utils.TILE_COUNT - 1)\n ]\n self.assertEqual(adj_list, adj_list_2)", "def checkIndex(key):\n if not isinstance(key, (int, float)): raise TypeError\n if key<0: raise IndexError", "def isIndexError(self, line_number):\n return line_number < 0 or line_number + 1 > len(self.numbers)", "def testUnknownIndices(self):\n params = constant_op.constant(((0, 1, 2),))\n indices = array_ops.placeholder(dtypes.int32)\n gather_nd_t = array_ops.gather_nd(params, indices, batch_dims=1)\n shape = gather_nd_t.get_shape()\n self.assertIsNone(shape.ndims)\n self.assertIsNone(tensor_shape.dimension_value(shape[0]))", "def test_case_07_side_too_small(self):\n self.__assert_equals_test_case([(-2, 2, 3), (0, 2, 3)], 'InvalidInput')", "def _iter_points_out_of_bounds(pc, bounds):\n for i, axis_coords in enumerate(pc.arr):\n for compare, bound in zip((np.less, np.greater_equal),\n (bounds[i], bounds[i+3])):\n if bound is not None:\n yield compare(axis_coords, bound)", "def special_open_neighbours(self, y, x):\n if self.table_state[y][x] != \"-\" and self.table_state[y][x] == self.flags_nearby(y, x):\n l = [[ye, xe] for xe in range(\n x - 1, x + 2) if xe >= 0 for ye in range(y - 1, y + 2) if ye >= 0]\n for ye, xe in l:\n if xe >= self.x or ye >= self.y: # do not open out of 
bounds\n continue\n # if it is a bomb but not flagged\n if self.final_table[ye][xe] == Minesweeper.BOMB and self.table_state[ye][xe] != Minesweeper.FLAG:\n self.show_answer_board([ye, xe])\n print \"KABOOM!\"\n return Minesweeper.IS_A_BOMB\n self.open_neighbours(y, x)\n self.print_table(self.table_state)\n return Minesweeper.NOT_A_BOMB", "def check_bounds(self, row: int, col: int) -> bool:\n return 0 <= row < self.row and 0 <= col < self.col" ]
[ "0.707258", "0.67884666", "0.6612479", "0.6408795", "0.6359397", "0.63439476", "0.6291896", "0.62381816", "0.6209924", "0.62078774", "0.61812973", "0.61478335", "0.6143461", "0.6123273", "0.6093582", "0.6058438", "0.6057654", "0.6057572", "0.6046116", "0.60263014", "0.6016032", "0.60005015", "0.5993491", "0.5983284", "0.59779084", "0.59693366", "0.5940261", "0.5929815", "0.59111494", "0.58978707", "0.58963084", "0.5887246", "0.58782655", "0.5878038", "0.5877253", "0.58745533", "0.58703023", "0.586089", "0.5852894", "0.5839618", "0.5831217", "0.58226025", "0.5805475", "0.58013904", "0.5784193", "0.5783628", "0.57830364", "0.57704705", "0.5764202", "0.57530004", "0.57477057", "0.57401943", "0.5735768", "0.5733382", "0.5729537", "0.5699907", "0.5698746", "0.5698314", "0.56907296", "0.5683222", "0.56816626", "0.5669568", "0.5664021", "0.565911", "0.56566995", "0.56540304", "0.5647142", "0.56440604", "0.56400543", "0.5638495", "0.5638003", "0.56373835", "0.56202066", "0.5618588", "0.5615325", "0.56105864", "0.56094456", "0.55967534", "0.5590854", "0.55855083", "0.5578789", "0.5574118", "0.5568313", "0.555732", "0.55544394", "0.5550896", "0.55500376", "0.55313075", "0.55298054", "0.5529467", "0.55247", "0.55168164", "0.5515824", "0.5510057", "0.5508066", "0.55031604", "0.54966795", "0.5485522", "0.5480051", "0.54784626" ]
0.6361581
4
Test the get method
def test_get(self):
    with mock.patch("builtins.input") as mock_input:
        mock_input.return_value = "foo"
        self.assertEqual(hangman.cli.screen.Screen.get("Foo?"), "foo")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get(self):\n pass", "def test_get(self):\n return self.doRequest(self.url, method=\"GET\", body=self.input)", "def test_gettem_using_get(self):\n pass", "def test_get2(self):\n pass", "def test_get1(self):\n pass", "def test_listtem_using_get(self):\n pass", "def test_get(self):\n self.assertEqual(200, self.resp.status_code)", "def test_get(self):\n self.assertEqual(200, self.resp.status_code)", "def test_get(self):\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 200)", "def test_get(self):\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 200)", "def test_get(self):\n self.assertEqual(200, self.response.status_code)", "def test_get(self):\n self.assertEqual(200, self.response.status_code)", "def test_get(self):\n self.assertEqual(\n self.attempts[0],\n self.resource.get(self.attempts[0][_ATTEMPT.attempt_id]))", "def test_get(self):\n url, port = self.server.address\n\n #couple of basic GETs\n r = self.client.get(\"http://{0}:{1}/\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/200\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/400\".format(url, port))\n self.assertEqual(400, r.status_code)\n\n # GETs with params\n r = self.client.get(\"http://{0}:{1}/get_with_params\".format(url, port),\n params=self.params)\n self.assertEqual(200, r.status_code)\n self.assertEqual(str(self.params), r.text)\n\n # GETs with ...?", "def test_meme_get(self):\n pass", "def test_two_legged_get(self):\n resp, content = self._two_legged(\"GET\")\n self.assertEqual(int(resp['status']), 200)", "def get():", "def get():", "def test_get(self):\n client = RestClient(host=self.host, username='')\n rest_url = 'some/url/'\n \n # Mock good response\n with responses.RequestsMock() as rsps:\n rsps.add(responses.GET, f'{self.host}/{rest_url}', status=200,\n json={'value':\"good!\"})\n r = client.get(rest_url)", "def test_get(self):\n obs = self.tester.get('1.SKM7.640188')\n exp = Sample('1.SKM7.640188', self.tester)\n self.assertEqual(obs, exp)", "def test_get(self):\n expected_response = {\n 'id': 1111,\n 'first_name': 'Jhon',\n 'last_name': 'Doe',\n 'user_id': 1001,\n 'telegram_id': None\n }\n\n response = self.client.get(self.url)\n self.assertJSONEqual(json.dumps(expected_response), json.loads(response.content))\n self.assertEqual(response.status_code, 200)", "def test_get_token_supply_using_get(self):\n pass", "def test_get_info(self):\n pass", "def test_get_page(self):\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 200)", "def test_get_page(self):\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 200)", "def test_smoker_get(self):\n pass", "def test_get_with_real_data(self):\n self.getPage('/blah', method='PUT')\n self.getPage('/')\n self.assertStatus('200 OK')\n self.assertHeader('Content-Type', 'application/json')\n self.assertBody('{\"mystring\": \"blah\"}')", "def do_get(self, *args):\n raise NotImplementedError()", "def get(self, *args):", "def test_get_one(self):\n response = self.client.get('/api/v1/parcels/100')\n self.assertEqual(response.status_code, 200)", "def test_get_by_name2(self):\n pass", "def test_get(self):\n response = self._get()\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('object' in 
response.context)\n self.assertEquals(response.context['object'], self.obj)\n self.assertTrue('form' in response.context)\n self.assertFalse(response.context['form'].is_bound)\n self.assertEquals(response.context['form'].instance, self.obj)\n self._assert_no_change()", "def test_properties_get(self):\n pass", "def test_get(self):\n response = self.client.get(self.url)\n\n # Standard response\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(\"td_biblio/entry_list.html\")", "def test_get(self):\n response = self.client.get('/weather/', format=\"json\")\n self.assertEqual(response.status_code, 200)", "def test_get_method(self):\n self.getPage('/')\n self.assertStatus('200 OK')\n self.assertHeader('Content-Type', 'application/json')\n self.assertBody('{\"mystring\": \"\"}')", "def test_get_by_name1(self):\n pass", "def test_get_fail(self):\n with self.assertRaises(AssertionError):\n self.resource.get(-1)", "def test_get_run(self):\n pass", "def test_get(self):\n # retrieve (get) request\n response = requests.get(self.url)\n # expected title of articles endpoint\n html_title = \"Crowd Scholar\"\n\n # assert get request returns a status code 200 (success)\n self.assertTrue(response.status_code is 200)\n # assert expected title is in response body\n self.assertTrue(html_title in response.text.title())", "def test_get_insumo(self):", "def test_get(self):\n self.assertEqual(self.tester.get('SEASON_ENVIRONMENT'), 'winter')\n self.assertEqual(self.tester.get('depth'), 0.15)", "def _get(self, *args, **kwargs):\n return self._request('get', *args, **kwargs)", "def test_solareclipses_get(self):\n pass", "def test_response_200_on_get(self):\n pass", "def test_load_book_details(self, mock_get):\n\n c = Client()\n data = {\n 'search_type': self.filter_subject,\n 'search_value': self.subject,\n }\n response = c.get('/taric_books/%s/' % self.ISBN)\n\n self.assertEqual(response.status_code, 200)", "def test_get(self):\n response = self._get()\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('object' in response.context)\n self.assertEquals(response.context['object'], self.obj)\n self.assertEquals(self.model.objects.count(), 1)", "def test_get(self):\n response = self._get()\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('object' in response.context)\n self.assertEquals(response.context['object'], self.obj)\n self.assertTrue('add_user_form' in response.context)\n self.assertFalse(response.context['add_user_form'].is_bound)", "def test_simple_translation_using_get(self):\n pass", "def test_v2_recognize_get(self):\n pass", "def test_00_api_get(self):\r\n # GET as Anonymous\r\n url = '/api/'\r\n action = 'get'\r\n self.check_limit(url, action, 'app')", "def get(self, *args, **kwargs):", "def test_doGet(self) -> None:\n\n status_code = apicall.doGet(URL, self._browserheader)\n print(\"in do get:\", status_code)\n assert status_code == API_SUCCESS", "def test_good_get_url(self):\n result = self._search('Love Story', just_results=True)\n get_url = result[0]['get_url']\n resp = self.app.get(get_url)\n self.assertEqual(resp.status_code, 200)\n self.assertIn('url', resp.data)\n self.assertIn('/d?', resp.data)", "def test_get_risk_profile_using_get(self):\n pass", "def test_client_retrieve(self):\n pass", "def test_get(self):\n self.assertEqual(self.tester.get('barcodesequence'), 'AGCGCTCACATC')", "def get(self, *args, **kwargs):\n pass", "def get(self, 
*args, **kwargs):\n pass", "def test_get_list(self):\n pass", "def get(self, *args, **kwargs):\n self.request(\"get\", *args, **kwargs)", "def test_get_record(self):\n pass", "def test_get(self):\n client = kazoo.client.KazooClient()\n kazoo.client.KazooClient.get.return_value = ('{xxx: 123}', None)\n self.assertEqual({'xxx': 123}, zkutils.get(client, '/foo'))\n\n # parsing error\n kazoo.client.KazooClient.get.return_value = ('{xxx: 123', None)\n self.assertEqual(\n '{xxx: 123',\n zkutils.get(client, '/foo', strict=False)\n )\n self.assertRaises(yaml.YAMLError, zkutils.get, client, '/foo')\n\n kazoo.client.KazooClient.get.return_value = (None, None)\n self.assertIsNone(zkutils.get(client, '/foo'))", "def test_get(self):\n response = self._get()\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('form' in response.context)\n self.assertFalse(response.context['form'].is_bound)\n self.assertEquals(self.model.objects.count(), 0)", "def test_mocked_get_api(self):\n c = Client()\n response = c.get(\"/apimock/mocked/api/account/154/\")\n self.assertEqual(response.status_code, 200)\n self.assertIn(\n '<table border=\"1\"><tr><th>amount</th><td>10PLN</td></tr></table>', response.content)\n response2 = c.get(\"/apimock/mocked/api/account/187/\")\n self.assertEqual(response2.status_code, 200)\n self.assertIn(\n '<table border=\"1\"><tr><th>amount</th><td>10PLN</td></tr></table>', response2.content)", "def test_users_get(self):\n pass", "def test_users_get(self):\n pass", "def test_api_user_get(self):\n pass", "def test_get_token_supply_all_using_get(self):\n pass", "def test_get_json(self, test_url, test_payload, mock_get):\n mock_get.return_value = test_payload\n res = get_json(test_url)\n self.assertEqual(res, test_payload)", "def test_get(self):\n self.assertEqual(self.config.get('basic','greeting'),'hello')", "def test_get(self):\n obs = self.tester.get('1.SKM7.640188')\n exp = PrepSample('1.SKM7.640188', self.tester)\n self.assertEqual(obs, exp)", "def test_get_results(self):\n pass", "def test_get_with_parameters(self):\n self._register_uri(httpretty.GET)\n response = self.client.get(self.test_endpoint,\n foo=\"bar\", spam=\"eggs\")\n self.assertIn(\"OAuth\", self._last_request().headers[\"authorization\"])\n self.assertEqual(self._last_request().querystring[\"foo\"], [\"bar\"])\n self.assertEqual(self._last_request().querystring[\"spam\"], [\"eggs\"])\n self.assertEqual(response, self.test_data)\n self.assertEqual(self.client.last_url, self.test_uri)\n self.assertEqual(self.client.last_params, {\"foo\": b\"bar\",\n \"spam\": b\"eggs\"})\n self.assertEqual(self.client.last_response.json(), self.test_data)", "def test_info_get(self):\n response = self.client.open(\n '/info',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_get_method(self):\n field = self.base_field\n sch = SchemaField(field)\n self.assertTrue(hasattr(sch, 'get'))\n self.assertEqual(field.get('Name'), sch.get('Name'))\n self.assertEqual(field.get('constraints'), sch.get('constraints'))\n self.assertEqual(None, sch.get('bad_keys'))\n self.assertEqual('default', sch.get('bad_keys', 'default'))", "def get(self, data):\n pass", "def test_successful_on_get(self):\n\n url = '/%s/jobs/' % self.api\n\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)", "def test_GET(self):\n if not self.url:\n return\n response = self.client.get(self.url, {}, 
format='json')\n self.assertIn(response.status_code, [status.HTTP_405_METHOD_NOT_ALLOWED,\n status.HTTP_401_UNAUTHORIZED])", "def test_get(self):\n response = self.get('/study/create/')\n self.assertEqual(response.code, 200)\n self.assertNotEqual(str(response.body), \"\")", "def test_get_request_normal_response(self, mock_get):\n\n # Arrange\n # Construct our mock response object, giving it relevant expected behaviours\n mock_resp_instance = MockResponse({\"msg\": \"success\"}, 200, content=\"abc\")\n mock_get.return_value = mock_resp_instance\n\n # Act\n response = get_request_data(self.url, json_resp=False)\n\n # Assert that the request-response cycle completed successfully.\n self.assertEqual(mock_resp_instance.status_code, 200)\n self.assertEqual(response, mock_resp_instance)", "def test_get_details7(self):\n pass", "def get() -> None:\n pass", "def test_GET_fetcher():\n params = {\n 'key1':'value1',\n 'arg2':'value2'\n }\n\n ## test that request goes ok\n resp = wf_utils.fetch_GET_request(\n GET_ECHO_ENDPOINT,\n params=params\n )\n\n ## test that response json can be parsed\n payload = resp.json()\n\n ## test that response contains expected echo\n assert payload['args'] == params\n assert payload['headers']['user-agent'] == wf_utils.USER_AGENT", "def test_get_response(self):\n c = Client()\n response = c.get(reverse('index_view'))\n self.assertEqual(response.status_code, 200)", "def test_get_method(self):\n response = self.client.get('/o/register')\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'auth/login.html')\n self.assertContains(\n response,\n 'Rejestracja w Volontulo'\n )\n self.assertContains(\n response,\n 'Logowanie'\n )\n self.assertNotIn('_auth_user_id', self.client.session)", "def test_get(self):\n num = random.choice(self.vehicle_list).id\n obj = self.client.bus.vehicles.get(num)\n self.assertEqual(type(obj), BusVehicle)\n self.assertEqual(obj.id, num)\n self.assertEqual(type(obj.run), BusRun)", "def get(self):\n self.get_or_post(method='GET')", "def test_error_html_using_get(self):\n pass", "def test_safeGet(self):\n self.assertIs(\n BMConfigParser().safeGet('nonexistent', 'nonexistent'), None)\n self.assertEqual(\n BMConfigParser().safeGet('nonexistent', 'nonexistent', 42), 42)", "def testGet(self):\n response = self.runGet(self.root)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(len(data), 1)", "def test_getting(self):\n self.assertEqual(self.test_notes['C'], self.C)", "def test_get(self):\r\n profile = self.profile_manager.get('testing')\r\n self.assertIsInstance(profile, Profile)", "def test_resource_user_resource_get_user_get(self):\n pass", "def test_metrostations_get(self):\n pass", "def test_get(self):\r\n resp = self.client.get_json(self.url + '/0')\r\n self.assertEqual(resp.status_code, 200)\r\n obj = json.loads(resp.content)\r\n self.assertEqual(self.starting_graders[0], obj)", "def test_get(self, employee_model):\n\n request = Mock()\n employee = Mock()\n employee_model.objects.get.return_value = employee\n\n self.serializer.data = {\n \"emp_no\": 10090,\n \"birth_date\": \"1961-05-30\",\n \"first_name\": \"Kendra\",\n \"last_name\": \"Hofting\",\n \"gender\": \"M\",\n \"hire_date\": \"1986-03-14\"\n }\n response = self.view.get(request, emp_no=1)\n\n employee_model.objects.get.assert_called_with(\n pk=1\n )\n self.assertEqual(response.status_code, 200)", "def test01getNumber(self):\n self.assertEqual( views.getNumber({}), {'number': 1234})", "def test_get(self):\n 
self.assertEqual(403, self.response.status_code)", "def test_mocked_get_json_format(self):\n c = Client()\n response = c.get(\"/apimock/mocked/mocked_get?format=json\")\n self.assertEqual(response.status_code, 200)\n self.assertEqual('{\"value\": \"testValue\"}', response.content)", "def test_get(self):\n #Validate the response\n resp = self.client.get('/api/v1/purchase-order/1/')\n self.assertEqual(resp.status_code, 200)\n \n #Validate the returned data\n obj = resp.data\n self.assertEqual(obj['id'], 1)\n self.assertEqual(obj['terms'], '0/net')\n self.assertEqual(obj['revision'], 0)\n \n #Test items\n self.assertIn('items', obj)\n self.assertEqual(len(obj['items']), 1)\n item1 = obj['items'][0]\n #self.assertIn('purchasing_units', item1)\n #self.assertEqual(item1['purchasing_units'], 'm')" ]
[ "0.91126484", "0.8588359", "0.857955", "0.8355553", "0.82723737", "0.8097538", "0.7919598", "0.7919598", "0.79123116", "0.79123116", "0.7767401", "0.7767401", "0.7659542", "0.7615611", "0.7463382", "0.7394966", "0.7359122", "0.7359122", "0.7332673", "0.73318106", "0.73232585", "0.73232293", "0.72942597", "0.72330505", "0.72330505", "0.7228757", "0.7185099", "0.71802956", "0.71790814", "0.7168635", "0.7159056", "0.7149953", "0.7117982", "0.7104531", "0.7100112", "0.7094874", "0.70657796", "0.7058364", "0.7058186", "0.705095", "0.70312315", "0.7018635", "0.70002437", "0.69996524", "0.6991668", "0.6986039", "0.6981901", "0.698035", "0.69770074", "0.69750535", "0.6968658", "0.6963862", "0.6954752", "0.69508034", "0.6936373", "0.6915778", "0.69101477", "0.6908935", "0.6908935", "0.690879", "0.6906988", "0.6900811", "0.68929946", "0.6891211", "0.6886133", "0.68853587", "0.68853587", "0.6872514", "0.6865998", "0.684911", "0.684879", "0.68454534", "0.68348324", "0.682706", "0.6821608", "0.6806897", "0.6802784", "0.6790947", "0.67725366", "0.6762373", "0.67386377", "0.6730335", "0.67289895", "0.6726918", "0.67263985", "0.67231095", "0.67230153", "0.67195654", "0.6719332", "0.6713545", "0.6704074", "0.6700867", "0.6696689", "0.66938204", "0.66902953", "0.66803277", "0.6673674", "0.66658914", "0.6663245", "0.6659032", "0.664532" ]
0.0
-1
Test the put method
def test_put(self):
    with mock.patch("builtins.print") as mock_print:
        hangman.cli.screen.Screen.put("foo!")
        mock_print.assert_called_with("\nfoo!")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kyc_put_request(self):\n pass", "def test_put_method(self):\n self.getPage('/blah', method='PUT')\n self.assertStatus('200 OK')\n self.assertHeader('Content-Type', 'application/json')\n self.assertBody('{\"mystring\": \"blah\"}')", "def test_add_item_at_using_put(self):\n pass", "def put(self):\n pass", "def put(self):\n pass", "def test_put(self):\n url, port = self.server.address\n\n #couple of basic POSTs\n r = self.client.get(\"http://{0}:{1}/\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/200\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/400\".format(url, port))\n self.assertEqual(400, r.status_code)\n\n r = self.client.put(\"http://{0}:{1}/400?foo=bar\".format(url, port))\n self.assertEqual(400, r.status_code)", "def test_put(self):\n client = RestClient(host=self.host, username='')\n rest_url = 'some/url/'\n \n # Mock good response\n with responses.RequestsMock() as rsps:\n rsps.add(responses.PUT, f'{self.host}/{rest_url}', status=200,\n json={'value':\"good!\"})\n r = client.put(rest_url)", "def test_client_can_do_put_request(self):\n response = self.httpbin_4.test_requests_put_method()\n self.assertEqual(response.request.method, 'PUT')\n self.assertEqual(response.status_code, 200)", "def test_put_one(self):\n response = self.client.put('/api/v1/parcels/100')\n self.assertEqual(response.status_code, 200)", "def test_put_success(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 200)", "def put(self):\n raise exceptions.NotImplemented", "def test_api_user_put(self):\n pass", "def put(self):\n return", "def test_kyc_put_request_legal(self):\n pass", "def put(self):\n raise NotImplementedError()", "def test_put(self):\n client = kazoo.client.KazooClient()\n zkutils.put(client, '/foo/bar')\n kazoo.client.KazooClient.create.assert_called_with(\n '/foo/bar', b'', acl=mock.ANY, makepath=True,\n sequence=False, ephemeral=False)", "def put(data):", "def test_0_put(self):\n self.assertIsNotNone(save_node_info(self.node.name, self.node))", "def test_cards_put(self):\n pass", "def test_update_risk_profile_using_put(self):\n pass", "def test_put(self):\n\n url = reverse('file')\n\n data = {\n 'shard_id': self.shard1.id,\n 'link_id': \"b8866161-0b1f-4a8e-acde-07047313ec8f\",\n 'parent_datastore_id': str(self.test_datastore_obj.id),\n 'chunk_count': 1,\n 'size': 512,\n }\n\n self.client.force_authenticate(user=self.test_user_obj)\n response = self.client.put(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertIn('file_id', response.data)\n self.assertIn('file_transfer_id', response.data)\n self.assertIn('file_transfer_secret_key', response.data)", "def test_aws_service_api_vm_command_put(self):\n pass", "def test_update_company_props_using_put(self):\n pass", "def test_update_bucket(self):\n pass", "def test_put_db_fail(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n with mock.patch('user_profile.models.UserProfile.update') as update:\n update.return_value = False\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def put(self, *_, 
**__): # pylint: disable=arguments-differ\n pass", "def put(self, *_, **__): # pylint: disable=arguments-differ\n pass", "def _put(self, *args, **kwargs):\n return self._request('put', *args, **kwargs)", "def test_security_on_put(self):\n # test the update url\n product = Product.objects.all()[0]\n url = '/product/xml/%s/' % product.item_number\n response = self.client.put(url,{'description':'my new description'})\n self.failUnlessEqual(response.status_code, 401)", "def test_putorganizations_item(self):\n pass", "def test_PUT(self):\n if not self.url:\n return\n response = self.client.put(self.url, {}, format='json')\n self.assertIn(response.status_code, [status.HTTP_405_METHOD_NOT_ALLOWED,\n status.HTTP_401_UNAUTHORIZED])", "def test_kyc_put_legal(self):\n pass", "def test_PUT4(self):\n payload = {\n \"make\": \"Nissan\",\n \"model\": \"Skyline\",\n \"year\": 1999,\n \"price\": 2200\n }\n r = requests.put(self.address + \"/loremipsum/42\", json=payload)\n self.assertEqual(r.status_code, 400)", "def put(self, put):\n\n self._put = put", "def test_kyc_put_document(self):\n pass", "def test_put_existing(self):\n def raise_exists(*args_unused, **kwargs_unused):\n \"\"\"zk.create side effect, raising appropriate exception.\"\"\"\n raise kazoo.client.NodeExistsError()\n\n client = kazoo.client.KazooClient()\n kazoo.client.KazooClient.create.side_effect = raise_exists\n zkutils.put(client, '/foo/bar')\n kazoo.client.KazooClient.set.assert_called_with('/foo/bar', b'')\n kazoo.client.KazooClient.set_acls.assert_called_with('/foo/bar',\n mock.ANY)", "def test_put_no_data(self):\n test_data = {}\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_update_case(self):\n pass", "def put(self, *args, **kwargs):\n self.request(\"put\", *args, **kwargs)", "def put(cls, obj):\n pass", "def _put(self, item: T) -> None:\n ...", "def put(self, *args, **kwargs):\n self.before_put(*args, **kwargs)\n\n super(DatastoreModel, self).put(*args, **kwargs)\n\n self.after_put(*args, **kwargs)", "def testPut(self):\n # XXX - not actually a unit test\n expectedOutput = (b'Transferred ' + self.testDir.asBytesMode().path +\n b'/testfile1 to ' + self.testDir.asBytesMode().path +\n b'/test\"file2')\n def _checkPut(result):\n self.assertFilesEqual(self.testDir.child('testfile1'),\n self.testDir.child('test\"file2'))\n self.assertTrue(result.endswith(expectedOutput))\n return self.runCommand('rm \"test\\\\\"file2\"')\n\n d = self.runCommand('put %s/testfile1 \"test\\\\\"file2\"'\n % (self.testDir.path,))\n d.addCallback(_checkPut)\n d.addCallback(lambda _: self.assertFalse(\n self.testDir.child('test\"file2').exists()))\n return d", "def _put(self, url, data, extra_headers=None):\n headers = {'X-Requested-By': 'Unit Tests'}\n headers.update(extra_headers)\n return self.client.put(\n url,\n content_type='application/json',\n data=utils.as_json(data),\n headers=headers,\n )", "def test_put_success(self):\n\n data = {\n 'time': '23:58:53'\n }\n\n url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n 
self.assertEqual(response.status_code, 200)", "def test_put_route(self):\n\n post = {\n 'ip': 'test_ip',\n 'next_hop': 'test_nexthop',\n 'communities': 'test_commu'\n }\n route_id = self.database.add_route(post)\n post3 = {\n 'ip': 'test_ip',\n 'next_hop': 'test_nexthop2',\n 'communities': 'test_commu2',\n 'is_activated': False,\n }\n put = self.database.route.find_one({'_id': route_id})\n route_id2 = self.database.put_route({\n '_id': put['_id'],\n 'ip': post3['ip'],\n 'communities': post3['communities'],\n 'next_hop': post3['next_hop'],\n 'is_activated': post3['is_activated']\n })\n post2 = self.database.route.find_one({'_id': route_id2['_id']})\n self.database.delete_route({'_id': route_id2['_id']})\n self.assertEqual(post2['ip'], post3['ip'], 'insertion failed')\n self.assertEqual(post2['next_hop'], post3['next_hop'],\n 'insertion failed')\n self.assertEqual(post2['communities'], post3['communities'],\n 'insertion failed')\n self.assertEqual(post2['is_activated'], post3['is_activated'],\n 'activation failed')", "def test_collection_put(testapp, execute_counter):\n initial = {\n 'title': \"Testing\",\n 'type': \"object\", # include a non-required field\n 'description': \"This is the initial insert\",\n }\n item_url = testapp.post_json('/embedding-tests', initial).location\n\n with execute_counter.expect(1):\n item = testapp.get(item_url).json\n\n for key in initial:\n assert item[key] == initial[key]\n\n update = {\n 'title': \"New Testing\",\n 'type': \"object\",\n 'description': \"This is the updated insert\",\n }\n testapp.put_json(item_url, update, status=200)\n\n res = testapp.get('/' + item['uuid']).follow().json\n\n for key in update:\n assert res[key] == update[key]", "def test_put_user_property(self):\n pass", "def test_update(self):\n pass", "def test_update(self):\n pass", "def test_update(self):\n pass", "def taco_test_put_new(self):\n body = '{ \"id\": 400, \"name\": \"item_new\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_put(self):\n self.seed_static_data()\n\n params = {\n 'id': 2,\n 'event_id': 1,\n 'tag_type': 'REGISTRATION',\n 'name': {\n 'en': 'Renamed English Name', # Rename\n 'zu': 'Zulu Name'\n },\n 'description': {\n 'en': 'Renamed English Description',\n 'zu': 'Zulu Description'\n },\n 'active': True\n }\n\n response = self.app.put(\n '/api/v1/tag', \n headers=self.user1_headers, \n data=json.dumps(params),\n content_type='application/json')\n self.assertEqual(response.status_code, 200)\n\n response = self.app.get('/api/v1/tag', headers=self.user1_headers, data={'id': 2, 'event_id': 1, 'language': 'en'})\n data = json.loads(response.data)\n\n self.assertEqual(data['id'], 2)\n self.assertEqual(data['event_id'], 1)\n self.assertEqual(data['tag_type'], 'REGISTRATION')\n self.assertDictEqual(data['name'], {\n 'en': 'Renamed English Name',\n 'zu': 'Zulu Name'\n })\n self.assertDictEqual(data['description'], {\n 'en': 'Renamed English Description',\n 'zu': 'Zulu Description'\n })", "def test_company_put_permissions(self):\n companyPK = Company.objects.get(name=self.admin.profile.company.name).pk\n url = reverse('Company-detail', kwargs={'pk': companyPK + 1})\n data = {'name': 'NewTestCompany', 'address': {'address1': '123 fake st',\n 'address2': 'fake address 2',\n 'city': 'nowhere', 'state': 'IN', 'zip': '90210'}}\n response = self.client.put(url, data, format='json')\n #This is 404 instead of 403 because there is no way to 
view a company\n #that you arent an employee of.\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n self.assertNotEqual(Company.objects.get(pk=companyPK).name,\n 'NewTestCompany')", "def test_update(self):\n self.my_task.key = self.task_storage.add(self.my_task)\n\n self.my_task.title = 'foo'\n key = self.task_storage.update(self.my_task)\n new_task = self.task_storage.find(key)\n\n self.assertEqual(self.my_task, new_task)", "def test_update(self):\n doctor = DoctorFactory.create(id=21)\n data = {'name': 'Joe'}\n self.assertNotEqual(doctor.name, data['name'])\n\n response = self.unath_client.put(reverse('doctor-detail', args=[21]), data=data)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n response = self.client.put(reverse('doctor-detail', args=[21]), data=data)\n self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)", "def test_wallets_put(self):\n pass", "def _put(self, key: str, value):\n pass", "def test_put_request_by_owner(self):\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION=self.test_user1_token)\n response = client.post('/api/places/', self.restaurant_data, format='json')\n url = f\"/api/places/{response.data['id']}/\"\n\n response = client.put(url, self.restaurant_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_06_put(self, mock_readall, mock_writeall, mock_shred,\n mock_config, mock_verks):\n self._init()\n udocker.Config = mock_config\n udocker.Config.tmpdir = \"/tmp\"\n kstore = udocker.KeyStore(\"filename\")\n self.assertFalse(kstore.put(\"\", \"\", \"\"))\n mock_readall.return_value = dict()\n kstore.put(self.url, self.auth, self.email)\n mock_writeall.assert_called_once_with(self.credentials)", "def testPutEntry(self):\n cache = ActionCache()\n action = BaseAction('x')\n cache.append(action)\n self.failUnless(cache.contains(action))", "def test_put():\n\n start_ln = len(routes.routes['PUT'])\n\n @put('/s/foo')\n def foo_route(request):\n return 200, ''\n\n for path, fn in routes.routes['PUT']:\n if fn == foo_route:\n found = (path, fn)\n assert found\n routes.routes['PUT'].remove(found)\n assert len(routes.routes['PUT']) == start_ln", "def test_costcenter_put_permissions(self):\n costCenterPK = CostCenter.objects.get(name='c2c1').pk\n url = reverse('CostCenter-detail', kwargs={'pk': costCenterPK})\n response = self.client.put(url, self.data, format='json')\n #This is 404 instead of 403 because there is no way to view a company\n #that you arent an employee of.\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n self.assertNotEqual(CostCenter.objects.get(pk=costCenterPK).name,\n 'testCostCenter')", "def test_put_check_content(self):\n kazoo.client.KazooClient.create.side_effect = (\n kazoo.client.NodeExistsError)\n kazoo.client.KazooClient.get.return_value = (b'aaa', {})\n zkclient = kazoo.client.KazooClient()\n zkutils.put(zkclient, '/a', 'aaa', check_content=True)\n self.assertFalse(kazoo.client.KazooClient.set.called)\n\n zkutils.put(zkclient, '/a', 'bbb', check_content=True)\n kazoo.client.KazooClient.set.assert_called_with('/a', b'bbb')", "def put(self, *args, **kwargs):\n return self.handle_put_request()", "def test_put_object_exceptions(self):\n # key is None\n err = None\n try:\n self.bos.put_object(self.BUCKET, None, None, 100, None)\n except ValueError as e:\n err = e\n finally:\n self.assertIsNotNone(err)\n # too long\n err = None\n try:\n self.bos.put_object(self.BUCKET, self.KEY, None, 6 * 1024 * 1024 * 1024, None)\n 
except ValueError as e:\n err = e\n finally:\n self.assertIsNotNone(err)", "def testUpdate(self):\n data = {'name': 'toto2'}\n response = requests.put(url=self.url, json=data)\n headers = response.headers\n json_data = response.json()\n\n self.assertTrue(self.place == storage.get(Place, self.place_id))\n self.assertEqual(response.status_code, 200, WRONG_STATUS_CODE_MSG)\n self.assertEqual(\n headers['Content-Type'], 'application/json', WRONG_TYPE_RETURN_MSG)\n storage.reload()\n place = storage.get(Place, self.place_id)\n self.assertEqual(place.name, 'toto2')\n self.assertIn('name', json_data, MISSING_NAME_ATTR_MSG)\n self.assertIn('number_rooms', json_data, MISSING_ROOM_NB_ATTR_MSG)\n self.assertIn('number_bathrooms', json_data,\n MISSING_BATHROOM_NB_ATTR_MSG)\n self.assertIn('price_by_night', json_data,\n MISSING_PRICE_BY_NIGHT_ATTR_MSG)\n self.assertIn('user_id', json_data, MISSING_USER_ID_ATTR_MSG)\n self.assertIn('city_id', json_data, MISSING_CITY_ID_ATTR_MSG)\n self.assertIn('created_at', json_data, MISSING_CREATED_AT_ATTR_MSG)\n self.assertIn('updated_at', json_data, MISSING_UPDATED_AT_ATTR_MSG)\n self.assertIn('__class__', json_data, MISSING_CLASS_ATTR_MSG)\n self.assertEqual(json_data['name'], 'toto2')\n storage.delete(place)\n storage.save()", "def test_aws_service_api_vm_tag_put(self):\n pass", "def test_put_validation_fail(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n\n with mock.patch('user_profile.views.profile_validator') as profile_validator:\n profile_validator.return_value = False\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def simulate_put(app, path, **kwargs) -> _ResultBase:\n return simulate_request(app, 'PUT', path, **kwargs)", "def test_post(self):\n self.response = self.client.put(self.url, dict(name='São Paulo'))\n self.assertEqual(\n status.HTTP_405_METHOD_NOT_ALLOWED, self.response.status_code)", "def put(self, key, headers, value, metadata=None):", "def test_patch_bucket(self):\n pass", "def simulate_put(self, path='/', **kwargs) -> _ResultBase:\n return self.simulate_request('PUT', path, **kwargs)", "def test_aws_service_api_vm_workshift_put(self):\n pass", "async def simulate_put(self, path='/', **kwargs) -> _ResultBase:\n return await self.simulate_request('PUT', path, **kwargs)", "def test_update_currency_using_put(self):\n pass", "def test_user_id_put(self):\n pass", "def test_put(self):\n url = reverse('events:EventView')\n response = self.client.post(url, self.valid_payload, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n\n url = reverse('events:EventUpdateDeleteView', args={response.data['id']})\n response = self.client.post(url, self.valid_payload, format='json')\n\n response = self.client.put(url, self.valid_payload,format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n response = self.client.get(url, self.valid_payload,format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def put(self, subject: any) -> any:\n pass", "def test_put_list_replace(self):\n self.story.places.add(*list(Place.objects.filter(name__in=('Humboldt Park', 'Wicker Park'))))\n self.story.save()\n self.assertEqual(self.story.places.count(), 2)\n put_data = [place.place_id for place in\n Place.objects.filter(name=\"Logan Square\")]\n self.api_client.client.login(username=self.username, password=self.password)\n uri = '/api/0.1/stories/%s/places/' % 
(self.story.story_id)\n response = self.api_client.put(uri, format='json', data=put_data)\n self.assertHttpAccepted(response)\n self.story = Story.objects.get(story_id=self.story.story_id)\n self.assertEqual(self.story.places.count(), 1)\n ids = [place.place_id for place in self.story.places.all()]\n self.assertEqual(ids, put_data)", "def test_PUT_success(self):\n self.client.login(username=\"testuser1\", password=\"1X<ISRUkw+tuK\")\n actualizacion = reverse(\"musica:detail\", kwargs={\"pk\": self.musica1.id})\n data_info = {\n \"cancion\": \"cancion que deberia estar incluida en mi lista\",\n \"artista\": \"artista test\",\n \"año\": \"2021-06-16\",\n \"genero\": \"Metal\",\n }\n respuesta_actualizada = self.client.put(actualizacion, data_info)\n print(respuesta_actualizada.data)\n self.assertEqual(200, respuesta_actualizada.status_code)\n valor_consulta = Musica.objects.get(id=respuesta_actualizada.data[\"id\"])\n # print(valor_consulta)\n self.assertEqual(respuesta_actualizada.data[\"cancion\"], valor_consulta.cancion)\n self.assertEqual(respuesta_actualizada.data[\"artista\"], valor_consulta.artista)\n self.assertEqual(respuesta_actualizada.data[\"año\"], str(valor_consulta.año))", "def test_validate_put_existing(client):\n response = client.put(\n '/user/1',\n data=json.dumps({\n 'name': 'Jeff Knupp',\n 'email': '[email protected]',\n }),\n headers={'Content-Type': 'application/json'}\n )\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def test_update_no_match(self):\n self.my_task.key = self.task_storage.add(self.my_task)\n\n self.task_storage.delete(self.my_task.key)\n\n self.my_task.title = 'foo'\n\n self.key = self.task_storage.update(self.my_task)\n\n self.assertIsNone(self.key)", "def test_put_list_new(self):\n self.story.save()\n self.assertEqual(self.story.places.count(), 0)\n put_data = [place.place_id for place in\n Place.objects.filter(name=\"Logan Square\")]\n self.api_client.client.login(username=self.username, password=self.password)\n uri = '/api/0.1/stories/%s/places/' % (self.story.story_id)\n response = self.api_client.put(uri, format='json', data=put_data)\n self.assertHttpAccepted(response)\n self.story = Story.objects.get(story_id=self.story.story_id)\n self.assertEqual(self.story.places.count(), 1)\n ids = [place.place_id for place in self.story.places.all()]\n self.assertEqual(ids, put_data)", "def simulate_put(self, path='/', **kwargs):\n return self.simulate_request('PUT', path, **kwargs)", "def test_groups_group_users_put(self):\n pass", "def test_groups_group_users_put(self):\n pass", "def test_specific_order_put_authentication(self):\n # Test with user token\n response = self.client.put(\n 'api/v1/parcels/100', headers=self.user_token_dict)\n data = json.loads(response.data)\n self.assertEqual(data, {message: 'Cannot perform this operation'})\n self.assertEqual(response.status_code, 401)\n # Test with invalid token\n response = self.client.put(\n 'api/v1/parcels/100', headers={'token': 'jonjffriu8u483u8384u82'})\n data = json.loads(response.data)\n self.assertEqual(data, {message: 'Invalid token'})\n self.assertEqual(response.status_code, 401)", "def test_update_one(self):\n pass", "def test_put_without_shard_id(self):\n\n url = reverse('file')\n\n data = {\n #'shard_id': self.shard1.id,\n 'link_id': \"b8866161-0b1f-4a8e-acde-07047313ec8f\",\n 'parent_datastore_id': str(self.test_datastore_obj.id),\n 'chunk_count': 1,\n 'size': 512,\n }\n\n self.client.force_authenticate(user=self.test_user_obj)\n response 
= self.client.put(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_put(self):\n cache = LRUCache(5)\n assert 0 == cache.size\n cache.put(1, 'aaa')\n assert 1 == cache.size", "def test_add_or_update_case(self):\n pass", "def test_put_no_profile(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n response = self.second_client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def test_put_recipe(self):\n recipe = sample_recipe(self.user)\n recipe.tags.add(sample_tag(self.user))\n payload = {\n 'title': 'Ham hack',\n 'time_minutes': 38,\n 'price': 33.00\n }\n res = self.client.put(detail_url(recipe.id), payload)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n recipe.refresh_from_db()\n serializer = RecipeSerializer(recipe)\n self.assertEqual(res.data, serializer.data)\n self.assertEqual(recipe.title, payload['title'])\n tags = recipe.tags.all()\n self.assertEqual(len(tags), 0)", "def test_put_non_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_and_get():\n test_key = 'qmk_compiler_test_unique_key_name'\n\n # Make sure our test key doesn't exist\n try:\n qmk_storage.get(test_key)\n raise RuntimeError('%s exists on S3 when it should not!' % test_key)\n except Exception as e:\n if e.__class__.__name__ != 'NoSuchKey':\n raise\n\n # Write it to S3\n qmk_storage.put(test_key, 'hello')\n\n # Make sure we can retrieve it\n saved_file = qmk_storage.get(test_key)\n qmk_storage.delete(test_key)\n assert saved_file == 'hello'", "def _put(self, name, document):\n raise NotImplementedError", "def test_db_creating_put(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.models.Notification.update') as notification_update:\n notification_update.return_value = False\n\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_app_can_update_a_list(self):\n self.ne=json.dumps({\"newName\":\"pants\"})\n list_update=self.client.put('/shoppinglists/trou',\n data=self.ne,\n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertIn(\"list doesnt exist\",str(list_update.data)) \n self.assertEqual(list_update.status_code,200)" ]
[ "0.8060677", "0.7866297", "0.78294384", "0.7793337", "0.7793337", "0.7779294", "0.7599597", "0.74996424", "0.74829245", "0.7425634", "0.7425499", "0.7411632", "0.7402768", "0.7373352", "0.7357826", "0.73161113", "0.7281676", "0.72784895", "0.72398466", "0.7162743", "0.7127128", "0.7077551", "0.7064592", "0.70644325", "0.70541054", "0.7048762", "0.7048762", "0.7032555", "0.702741", "0.70140386", "0.6992264", "0.6971134", "0.69691503", "0.69367576", "0.69154567", "0.69057435", "0.6876547", "0.6867575", "0.68577117", "0.6855628", "0.6848476", "0.6832973", "0.68195397", "0.6801935", "0.67999446", "0.6793843", "0.6786237", "0.67686933", "0.67667353", "0.6761957", "0.6761957", "0.6761957", "0.6755679", "0.675232", "0.6750297", "0.67424107", "0.6728384", "0.67268246", "0.67119646", "0.6703522", "0.6703089", "0.6694693", "0.66945124", "0.6675852", "0.6673905", "0.6661538", "0.6657968", "0.66524374", "0.6645236", "0.6644788", "0.66308683", "0.6626606", "0.659841", "0.65979034", "0.65915525", "0.6585289", "0.65821403", "0.6581355", "0.6576038", "0.6556364", "0.6550068", "0.6545072", "0.6539684", "0.65343446", "0.65244615", "0.652237", "0.6521954", "0.65205085", "0.65205085", "0.65200555", "0.65112126", "0.64840317", "0.6478598", "0.6475184", "0.6472669", "0.64602655", "0.64547485", "0.6452134", "0.6442077", "0.6440617", "0.64329463" ]
0.0
-1
Test the goodbye method
def test_goodbye(self):
    with mock.patch("builtins.print") as mock_print:
        hangman.cli.screen.Screen.goodbye()
        output = ",".join([str(x) for x in mock_print.call_args_list])
        self.assertTrue("Goodbye" in output)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def goodbye(self, args):\n\t\tself.write_line(\"GOODBYE\")\n\t\tself.close();", "async def testgoodbye(self, ctx, *, member = None):\n\n # Check if we're suppressing @here and @everyone mentions\n if self.settings.getServerStat(ctx.message.guild, \"SuppressMentions\"):\n suppress = True\n else:\n suppress = False\n\n isAdmin = ctx.message.author.permissions_in(ctx.message.channel).administrator\n if not isAdmin:\n checkAdmin = self.settings.getServerStat(ctx.message.guild, \"AdminArray\")\n for role in ctx.message.author.roles:\n for aRole in checkAdmin:\n # Get the role that corresponds to the id\n if str(aRole['ID']) == str(role.id):\n isAdmin = True\n\n # Only allow admins to change server stats\n if not isAdmin:\n await ctx.channel.send('You do not have sufficient privileges to access this command.')\n return\n\n if member == None:\n member = ctx.message.author\n if type(member) is str:\n memberName = member\n member = DisplayName.memberForName(memberName, ctx.message.guild)\n if not member:\n msg = 'I couldn\\'t find *{}*...'.format(memberName)\n # Check for suppress\n if suppress:\n msg = Nullify.clean(msg)\n await ctx.channel.send(msg)\n return\n # Here we have found a member, and stuff.\n # Let's make sure we have a message\n message = self.settings.getServerStat(ctx.message.guild, \"Goodbye\")\n if message == None:\n await ctx.channel.send('Goodbye message not setup. You can do so with the `{}setgoodbye [message]` command.'.format(ctx.prefix))\n return\n await self._goodbye(member, ctx.message.guild, ctx.message.channel)\n\n # Print the goodbye channel\n welcomeChannel = self.settings.getServerStat(ctx.message.guild, \"WelcomeChannel\")\n if welcomeChannel:\n for channel in ctx.message.guild.channels:\n if str(channel.id) == str(welcomeChannel):\n welcomeChannel = channel\n break\n if welcomeChannel:\n msg = 'The current goodbye channel is **{}**.'.format(welcomeChannel.mention)\n else:\n if self._getDefault(ctx.guild):\n msg = 'The current goodbye channel is the default channel (**{}**).'.format(self._getDefault(ctx.guild).mention)\n else:\n msg = 'There is *no channel* set for goodbye messages.'\n await ctx.channel.send(msg)", "def test_quit_game(run):\n out, _ = run(dork.cli.quit_game)\n assert \"Thank you\" in out", "def test_teardown(self):\n with pytest.raises(NotImplementedError):\n self.behaviour.teardown()", "def teardown(self, exception):", "def _bye(self):\n self.get(\"BYE\",'')\n self.send()", "def teardown_function(self):\r\n raise AppModule.Unimplemented()", "def test_teardown(self):\n with pytest.raises(NotImplementedError):\n self.handler.teardown()", "def teardown(self) -> None:", "def teardown(self) -> None:", "def teardown(self) -> None:", "def teardown(self):", "def teardown(self):", "def teardown(self):", "def test_terminate_run(self):\n pass", "def __exit__(self, *args):\n if self.teardown:\n super().__exit__(*args)", "def teardown_method(self):", "def test_teardown(self):\n assert self.http_handler.teardown() is None\n self.assert_quantity_in_outbox(0)", "def test_teardown(self):\n assert self.search_behaviour.teardown() is None\n self.assert_quantity_in_outbox(0)", "def die(self):\n pass", "def test_uninstall(self):\n pass", "def quit(self, reason=\"\", *args, **kwargs):\n pass", "def stopTest(self, test):", "def test_do_quit(self):\n for string in self.random_strings:\n self.assertTrue(self.CommandParser.do_quit(string))", "def teardown(self):\n pass", "def teardown(self):\n pass", "def teardown(self):\n pass", "async def goodbyemessage(self, ctx):\n 
await util.command_group_help(ctx)", "def teardown(self,**kwargs):\n pass", "def do_exit(self,*args):\r\n return True", "def tearDown(self):\n\t\tprint(\"end test\")\n\t\tpass", "def testBasicQuit(self):\n self.client_connect()\n self.client_send(\"get keyNotThere\\r\\n\")\n self.mock_recv('get keyNotThere\\r\\n')\n self.mock_send('END\\r\\n')\n self.client_recv(\"END\\r\\n\")\n\n self.client_send(\"quit\\r\\n\")\n self.assertTrue(self.mock_quiet())", "def test_endFunctionKEy(self):\n return self._endTest(ServerProtocol.END)", "def tearDown(self):\n self.brow.quit()", "def test_lose(self):\n self.choice.return_value = \"ant\" \n self.input.side_effect = list(\"bcdefg\" \"n\")\n\n gallows.main()\n\n self.xprint.assert_any_call('You have run out of guesses!')", "def test_clean_exit(self):\n ch = connection_helper()\n qr = list_test_artifacts(None, ch.tables)\n self.assertFalse(bool(qr), \"\"\"Run 'removefacts --conf <config> --removetestlist' or \nexecute 'tests/scripts/removetestfacts.py' to fix\"\"\")", "def teardown(self, rc):\n pass", "def do_quit(self, args):\n print('Good Bye!')\n exit()", "def exit(self):\n pass", "def test_quit(self):\n _help = 'Quit method to exit form cmd '\n _help += 'program (Usage: quit)\\n'\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"help quit\")\n self.assertEqual(f.getvalue(), _help)", "def stopTestRun(self):", "def do_bye(self, arg):\n if arg != \"logout\":\n print(\"Shutting down admin client, please wait...\")\n self.api.logout()\n return True", "def quitting(self):\n pass", "def teardown(self) -> None:\n pass", "def teardown(self) -> None:\n pass", "def tearDown(self):\n self.teardown_beets()", "def XXtearDown(self):\n print(\"FooTest:tearDown_:begin\")\n ## do something...\n print(\"FooTest:tearDown_:end\")", "def XXtearDown(self):\n print(\"FooTest:tearDown_:begin\")\n ## do something...\n print(\"FooTest:tearDown_:end\")", "def quitme(self, evt=None):\n if evt:\n self.dbgprint(\"bye!\")\n sys.exit()", "def goodbye(reason):\n try:\n reactor.stop()\n except ReactorNotRunning:\n pass\n return origLost(reason)", "def tearDown(self):\n self.hass.stop()", "def tearDown(self):\n self.hass.stop()", "def tearDown(self):\n self.hass.stop()", "def tearDown(self):\n self.hass.stop()", "def do_quit(self, arg):\n\n print('Good Bye!')\n exit()", "def shutdown(self):\n ev3.Sound.speak(\"Goodbye\")", "def do_exit(self, _):\n return True", "def teardown_method(self):\n self.hass.stop()", "def teardown_method(self):\n self.hass.stop()", "def teardown_method(self):\n self.hass.stop()", "def test_teardown(self):\n assert self.transaction_behaviour.teardown() is None\n self.assert_quantity_in_outbox(0)", "def __exit__(self, type, value, tb):\t\t\n\t\tself.disconnectTeamserver()", "def teardown(self):\n pass # pylint: disable=unnecessary-pass", "def bcp_goodbye(self, **kwargs):\n if self.config['mediacontroller']['exit_on_disconnect']:\n self.socket_thread.sending_thread.stop()\n sys.exit()", "def test_simple(self):\n hooks = Hooks()\n assert_that(hooks.cleanup, equal_to(''))", "def tearDown(self):\n zope.component.testing.tearDown()", "def assert_clean_exit():\n with pytest.raises(SystemExit) as e:\n yield\n assert e.value.code == 0", "def tearDown(self):\n print('Calling \\'tearDown\\'')", "def test_closed(self):\n server, client = loopback()\n server.sock_shutdown(2)\n with pytest.raises(SysCallError) as err:\n server.sendall(b\"hello, world\")\n if platform == \"win32\":\n assert err.value.args[0] == ESHUTDOWN\n else:\n assert 
err.value.args[0] == EPIPE", "def test_does_die(self):\n self.herb.fitness = 0\n self.herb.params[\"omega\"] = 1\n nt.assert_true(self.herb.death())", "def call_quit(self, _):\n return True", "def call_quit(self, _):\n return True", "def call_quit(self, _):\n return True", "def __exit__(self, exc_type, exc_val, exc_tb):\n\n self.quit()", "def die():\n print(\"*\"*13)\n print(\"* Goodbye!! *\")\n print(\"*\" * 13)\n quit()", "def teardown(self, event):\n pass", "def teardown(self):\n raise NotImplementedError", "def teardown(self):\n raise NotImplementedError", "def stop_fixture(self):\n pass", "def tearDownClass(cls):\r\n print(\"==========================\")\r\n print(\"Cleaning mess after testing!\")", "def __exit__(self, *args):\n pass", "def quit(self, reason=None):\n raise NotImplementedError", "def end():\n return say()", "def test_unloadable(self):\n pass", "def tearDown(self) -> None:\n pass", "def tearDown(self) -> None:\n pass", "def tearDown(self) -> None:\n pass", "def tearDown(self) :\n pass", "def tearDown(self) :\n pass", "def tearDown(self) :\n pass", "def do_exit(self, s):\n return True", "def quit(self, *args, **kwargs):\n pass", "def _tearDown(self):\r\n\r\n if core.FW_conf['connection'].isLeader() and core.FW_conf['settings'].TestRun.BLTEnabledInFollower:\r\n executeInFollower(\"core.FW_conf['blt_ue'].stopCurrentMeasuring()\")\r\n\r\n # stop current measurement if battery is available\r\n if core.FW_conf['connection'].battery is not None and core.FW_conf['connection'].battery.isEnabled():\r\n core.FW_conf['connection'].battery.stopCurrentMeasuring()\r\n\r\n # skip tearDown if systemExit exception has occurred or\r\n # we are stopping execution or teardown skipping is wanted\r\n if not self._raiseSystemExit and not core.FW_conf['should_stop']:\r\n debug.out(\"MarbleTestCase tearDown\")\r\n\r\n self.logApply(core.FW_conf['connection']._tearDown, self)\r\n\r\n for remote in core.FW_conf['remote_connection']:\r\n self.logApply(remote._tearDown, self)", "def tearDown(self):\n print('tearDown method\\n')", "def test_quit(self):\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"quit\")\n out = f.getvalue()\n self.assertTrue(len(out) == 0)\n self.assertEqual(\"\", out)\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"quit fake\")\n msj = f.getvalue()\n self.assertTrue(len(msj) == 0)\n self.assertEqual(\"\", msj)", "def tearDown(self): # pylint: disable=invalid-name\n self.hass.stop()", "def tearDown(self): # pylint: disable=invalid-name\n self.hass.stop()", "def tearDown(self): # pylint: disable=invalid-name\n self.hass.stop()", "def tearDown(self): # pylint: disable=invalid-name\n self.hass.stop()", "def quit(self):\n pass" ]
[ "0.7509933", "0.6987562", "0.6891158", "0.6829135", "0.68261707", "0.68175083", "0.6816748", "0.6797453", "0.6768984", "0.6768984", "0.6768984", "0.67623085", "0.67623085", "0.67623085", "0.67440975", "0.66107154", "0.658389", "0.6566832", "0.65569544", "0.65374553", "0.6533507", "0.6510029", "0.6507859", "0.6487468", "0.64665747", "0.64665747", "0.64665747", "0.6445463", "0.644296", "0.64395785", "0.6436149", "0.6410975", "0.639748", "0.63922715", "0.6385605", "0.6379972", "0.6377539", "0.6376344", "0.63751334", "0.63733137", "0.63714474", "0.6360068", "0.63525814", "0.6338038", "0.6338038", "0.63275874", "0.63222975", "0.63222975", "0.6320589", "0.63188756", "0.63075316", "0.63075316", "0.63075316", "0.63075316", "0.63054645", "0.62966585", "0.6296545", "0.6281243", "0.6281243", "0.6281243", "0.6274542", "0.62713486", "0.6270277", "0.62646323", "0.62625766", "0.62603873", "0.62550086", "0.6252184", "0.624043", "0.6232854", "0.622694", "0.622694", "0.622694", "0.6225559", "0.62236565", "0.62047917", "0.6202984", "0.6202984", "0.62026143", "0.62011516", "0.61964273", "0.6172981", "0.6169127", "0.6165274", "0.61640877", "0.61640877", "0.61640877", "0.61633575", "0.61633575", "0.61633575", "0.61621636", "0.61529976", "0.6151692", "0.6137633", "0.6133796", "0.61321205", "0.61321205", "0.61321205", "0.61321205", "0.612553" ]
0.7486902
1
Returns the total number of hives in this apiary.
def hives_count(self) -> int: return self.hives.count()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def count(self) -> float:\n return pulumi.get(self, \"count\")", "def count(self) -> int:\n return pulumi.get(self, \"count\")", "def total_number_of_animals(self):\n animals = self.animal()\n print 'Total number of animals on island: {:4}'.format(\n animals[\"Herbivores\"] + animals[\"Carnivores\"])", "def totalCount(self):\n return sum(self.values())", "def totalCount(self):\n return sum(self.values())", "def totalCount(self):\n return sum(self.values())", "def getHP(self):\n return len(self.deck)", "def count(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'count')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def total_count(self) -> int:\n return self.__total_count", "def total(self):\n return self._evaluate()['hits']['total']", "def tally(self):\n return self.count", "def count(self):\n return self.get_count()", "def get_total_count(self):\n return self.total_count", "def response_count(self) -> int:\n return pulumi.get(self, \"response_count\")", "def get_inventory_count(self):\n resp = self.app.get('/inventories')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = json.loads(resp.data)\n return len(data)", "def GetCount(self):\n return self._server.get_count()", "def total_cards(self):\n amount = 0\n for palo in self._cards:\n amount = amount + len(self._cards[palo])\n\n return amount", "def num_herbs(self):\n return self._num_herbs", "def get_count(cls):\n total = 0\n for counter in SimpleCounterShard.objects.all():\n total += counter.count\n return total", "def Count(self):\r\n\t\treturn self._get_attribute('count')", "def Count(self):\r\n\t\treturn self._get_attribute('count')", "def get_count(self):\n\n\t\treturn self.__count", "def total(self) -> int:\n return self._total", "def num_animals(self):\n return self._num_herbs + self._num_carns", "def herb_count(self):\n return len(self.herbivores)", "def get_total_instruments(self):\n\n total = 0\n for exchange in self.exchanges:\n total += len(exchange.symbols)\n return total", "def get_count(self):\r\n return self.count", "def get_total_expenses(self):\n return sum(self.expenses.values())", "def get_TotalCount(self):\n return self._output.get('TotalCount', None)", "def Count(self):\n return self._get_attribute('count')", "def get_count(self):\n return self.count", "def get_count(self):\n return self.count", "def GetCount(self):\n return(self.count)", "def get_num_petals(self):\n return self._num_petals", "def count(self) -> int:\n return self._count", "def count(self) -> int:\n return self._count", "def count(self) -> int:\n return self._count", "def count(self):\n \n return self._count", "def count(self):\n # TODO not implemented yet\n return 0", "def count(self):\n\n return self._get(\"count\", rtype=UInt)", "def get_count(self):\n return self._count", "def count(self) -> int:\n return self.__count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def Count(self) -> int:", "def Count(self) -> int:", "def Count(self) -> int:", "def Count(self) -> int:", "def count(self):\n return self.size()", "def total_pulls(self) -> int:\n return self.__total_pulls", "def response_count(self):\n return self.responses.count()", "def 
get_total_appliance(self):\n total = 0\n for appliance in self.get_appliances():\n total += appliance.get_total()\n return total", "def total(self) -> int:\n if self._total is None:\n self._total = self.counts.sum()\n return self._total", "def count(self):\n return self.vcount", "def totalcounts(self):\n return self.datacounts + self.bkgdcounts", "def getTotalIndividualCount(self):\r\n return self._n", "def count(self) -> Optional[float]:\n return pulumi.get(self, \"count\")", "def total(cls) -> int:\n entity_list = cls.query.all()\n if entity_list:\n return len(entity_list)\n return 0", "def get_amount(self): \n return len(self.get_cards())", "def total_entry_count(self):\n return self.first_entry_count + self.subsequent_entries_count", "def get_item_count(self):\n resp = self.app.get('/items')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = json.loads(resp.data)\n return len(data)", "def __len__(self):\n total_objs = 0\n\n if self._shelve is not None:\n total_objs += len(self._shelve)\n\n if self._dict is not None:\n total_objs += len(self._dict)\n\n return total_objs", "def count(self):\n return self._lift(\"count\")", "def total(self):\n return self._results.total", "def size(self):\n\t\treturn self._count", "def get_total_bets(self) -> int:\n return self._total_bet_count.get() + self._daily_bet_count.get()", "def total_record_count(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"total_record_count\")", "def count(self) -> pulumi.Input[float]:\n return pulumi.get(self, \"count\")", "def count(self) -> pulumi.Input[float]:\n return pulumi.get(self, \"count\")", "def total_answer_count(self):\n # TODO(sll): Cache this computed property.\n total_count = 0\n for answer, count in self.answers.iteritems():\n total_count += count\n return total_count", "def GOAL_TOTAL() -> int:\n return 21", "def get_length(self):\n length = 0\n for card in self.decklist:\n length += card.amount\n return length", "def getCount(self):\n return self.count", "def count(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"count\")", "def total_count(self):\n return self.applied_count + self.error_count", "def total_record_count(self) -> int:\n return pulumi.get(self, \"total_record_count\")", "def __numHeads(self):\n count = 1\n\n while (self.__coinFlip() == 1):\n count += 1\n return count", "def count(self):\n return len(self)", "def getCount(self):\n return self.base.get(\"count\", [])", "def retrieve_num_instances(service):\n instance_counts = service[\"instance-counts\"]\n return instance_counts[\"healthy-instances\"] + instance_counts[\"unhealthy-instances\"]", "def count(self):\n return self.properties.get('count')", "def __len__(self, context=None):\n if context is not None:\n context = self._repair_context(context)\n uri = self.rest_services[\"size\"]\n payload=dict()\n if context:\n context = context.n3()\n payload[\"context\"] = context\n r = requests.get(uri, params = payload)\n return int(r.text)", "def __len__(self):\n return sum(item[\"quantity\"] for item in self.carro.values())", "def count(self):\n return {'count': self.collection.count()}", "def __len__(self):\n return int(self.total)", "def totalsize(self):\n return sum([sz for sz in self.iterate()])", "def count(self):\n return len(self.deck)", "def getLength(self):\n return self.count", "def get_size(self) -> int:\n total_size = 0\n for entry in self.__entries:\n total_size += entry.get_size()\n return total_size", "def count(self):\n return self._reduce_for_stat_function(F.count, only_numeric=False)", "def 
count(self) -> Optional[int]:\n return pulumi.get(self, \"count\")", "def count(self) -> Optional[int]:\n return pulumi.get(self, \"count\")" ]
[ "0.6977683", "0.6883911", "0.6836276", "0.6753868", "0.6753868", "0.6753868", "0.6739675", "0.67286235", "0.67053115", "0.66854024", "0.6670815", "0.66571957", "0.6634862", "0.6606549", "0.65562564", "0.6529486", "0.6525879", "0.65013546", "0.64641476", "0.64448", "0.64448", "0.6438567", "0.64284086", "0.642345", "0.6422671", "0.64137036", "0.63925606", "0.6389981", "0.6388301", "0.63857096", "0.6382712", "0.6382712", "0.63755095", "0.63605833", "0.63578695", "0.63578695", "0.63578695", "0.6350555", "0.6343425", "0.6338614", "0.6332064", "0.63258505", "0.6322762", "0.6322762", "0.6322762", "0.6322762", "0.6322762", "0.6322762", "0.6322762", "0.6322762", "0.6322762", "0.6322762", "0.63132554", "0.63132554", "0.63132554", "0.63132554", "0.6307168", "0.6290977", "0.6288465", "0.6271012", "0.62696314", "0.6269067", "0.62689185", "0.62629324", "0.6259738", "0.62568253", "0.6221605", "0.62166196", "0.62136453", "0.6211441", "0.6196329", "0.61941665", "0.61919284", "0.6180781", "0.61805886", "0.6179737", "0.6179737", "0.6177273", "0.61728865", "0.6170866", "0.61551034", "0.61520225", "0.6144284", "0.6143635", "0.6115979", "0.61134976", "0.61036307", "0.6102532", "0.6101286", "0.609488", "0.6087107", "0.6077702", "0.6071809", "0.60711694", "0.60691035", "0.60668206", "0.6062487", "0.60607904", "0.6055852", "0.6055852" ]
0.8205529
0
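A minimal sketch of the model context the `hives_count` record above implies, assuming Django with a reverse foreign key named `hives` (the model names and the `related_name` are assumptions inferred from the record, not part of the dataset):

```python
# Hypothetical Django models; names are assumptions inferred from the record.
from django.db import models

class Apiary(models.Model):
    name = models.CharField(max_length=100)

    def hives_count(self) -> int:
        # self.hives is the reverse relation created by related_name below
        return self.hives.count()

class Hive(models.Model):
    apiary = models.ForeignKey(
        Apiary, on_delete=models.CASCADE, related_name="hives"
    )
```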
Performs an arbitrary mapping on the data and returns the result. This is typically an expansion of the data. For instance, the quadratic expansion maps each row (a, b) to (a, b, a^2, b^2, ab), each row (c, d) to (c, d, c^2, d^2, cd), and so on for the remaining rows.
def getExpansion(self, data): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def map():", "def tuple_map(x):\n return x * 2", "def reflect(data, mapfunc = lambda x:x):\n data2 = np.zeros([tsize, npsi])\n # Copy the original data\n for i in np.arange(ntheta):\n data2[i,:] = data[i,:]\n # Now fill in the remainder\n for i in np.arange(ntheta, tsize):\n t0 = tsize - 1 - i\n data2[i,:] = mapfunc(data[t0,:])\n return data2", "def Map(dataset, map_func, input_columns=None):\n return dataset.map(map_func)", "def recursive_map(func, data):\n\n def recurse(item):\n return recursive_map(func, item)\n\n items_mapped = map_collection(recurse, data)\n return func(items_mapped)", "def _maping(x,y,l,a):\n newx = (x**2 *(l* ((x**2 + y**2)**(a/2) - 1) + 2) - l * y**2 *((x**2 + y**2)**(a/2) - 1))/(x**2 + y**2) \n newy = (2 * x* y *(l* ((x**2 + y**2)**(a/2) - 1) + 1))/(x**2 + y**2)\n return newx, newy", "def adem_basis_elt_2_map(*, Sq_fn, basis_elt):\r\n return [Sq_fn(Sq) for Sq in basis_elt]", "def map(z):\n pass", "def feature_mapping(x, y, power, as_ndarray=False):\n # data = {}\n # # inclusive\n # for i in np.arange(power + 1):\n # for p in np.arange(i + 1):\n # data[\"f{}{}\".format(i - p, p)] = np.power(x, i - p) * np.power(y, p)\n\n data = {\"f{}{}\".format(i - p, p): np.power(x, i - p) * np.power(y, p)\n for i in np.arange(power + 1)\n for p in np.arange(i + 1)\n }\n\n if as_ndarray:\n return pd.DataFrame(data).as_matrix()\n else:\n return pd.DataFrame(data)", "def _translate(self, data):\n pass\n return [i*2 for i in data]", "def mapping(array, map):\r\n n = map.shape[0] \r\n if array.dtype == np.complex :\r\n arrayout = np.zeros((2*n),dtype=array.real.dtype)\r\n for ii in range(n):\r\n i = map[ii,0]\r\n j = map[ii,1]\r\n arrayout[ii] = array[i,j].real\r\n arrayout[ii+n] = array[i,j].imag\r\n else :\r\n arrayout = np.zeros((n),dtype=array.dtype)\r\n for ii in range(n):\r\n i = map[ii,0]\r\n j = map[ii,1]\r\n arrayout[ii] = array[i,j]\r\n return arrayout", "def Map(\r\n data,\r\n map_fct: Callable,\r\n info: List[Dict] = None,\r\n lazy: bool = True,\r\n workers: int = 1,\r\n buffer_len: int = 3,\r\n *arg: list,\r\n **kwargs: Dict\r\n) -> Union[MapAbstract, DataAbstract, np.ndarray, list]:\r\n\r\n if lazy:\r\n return MapAbstract(data, map_fct, *arg, info=info, **kwargs)\r\n else:\r\n return DataAbstract(\r\n MapAbstract(data, map_fct, *arg, info=info, **kwargs),\r\n workers=workers,\r\n buffer_len=buffer_len,\r\n )[:]", "def map_reshaper(map):\n a = [[map[int(i / 2), int(j / 2)] for j in range(50)] for i in range(50)]\n return np.array(a)", "def dynamic_shape_map(xs, y):\n\n def body(x, y):\n return x + y\n\n return map(body, xs, y)", "def test_map(self, start: Result[int, str], exp: Result[int, str]) -> None:\n assert start.map(lambda x: int(x ** 2)) == exp", "def list_map(data, function):\n return list(map(function, data))", "def map(keys, values) -> MapValue:\n return ops.Map(keys, values).to_expr()", "def map(inputs, e0,e1,k):\r\n codebook = tf.cast(inputs[0][0:2 ** k], tf.float32)\r\n soft_map = tf.TensorArray(tf.float32, size=0, dynamic_size=True)\r\n for y in inputs[1]:\r\n Pxy_map = pyx(y, codebook, e0, e1)\r\n soft_map = soft_map.write(soft_map.size(), Pxy_map)\r\n\r\n soft_map = soft_map.stack()\r\n return soft_map", "def map_onto(self, xmapping):\n mapped_axis = xmapping.value_label\n if not self.has_axis(mapped_axis):\n raise ArrayMappingError('Value label \"%s\" of xmapping not found '\n 'in array axes (%s)'\n % (mapped_axis,\n ', '.join(self.axes_names)))\n\n if not set(xmapping.data.flat).issuperset(self.get_domain(mapped_axis)):\n raise 
ArrayMappingError('Domain of axis \"%s\" to be mapped is not a '\n 'subset of values in the mapping array.'\n % mapped_axis)\n dest = None\n for mval in self.get_domain(mapped_axis):\n sub_a = self.sub_cuboid(**{mapped_axis: mval})\n sub_mapping = self.xndarray_like(\n xmapping, data=xmapping.data == mval)\n rsub_a = sub_a.repeat(sub_mapping.sum(), '__mapped_axis__')\n dest = rsub_a.cexpand(sub_mapping, '__mapped_axis__', dest=dest)\n return dest", "def MapDataList(ea, length, func, wordsize=1):\n PutDataList(ea, map(func, GetDataList(ea, length, wordsize)), wordsize)", "def mapperCRT(elt, p: int, q: int, action: bool = True, Verbose: bool = False):\n # Mapping\n if action:\n a = elt % p\n b = elt % q\n\n if Verbose and q != p:\n print(f\"Converting {elt} in Zpq to a in Zp and b in Zq.\")\n print(f\"With a = {a} mod {p} and b = {b} mod {q}\")\n\n return (a, b)\n\n x = ChineseRemainder(elt, [p, q], Verbose)\n return x", "def __call__(self, x, n):\n if (x, n) not in self.d:\n self.d[(x,n)] = x**n\n return self.d[(x,n)]", "def _full_mapping(self, data):\r\n x = self._empty_mapping()\r\n for key, value in data.items():\r\n x[key] = value\r\n return x", "def map2(inKey, inVal):\n return [([inKey[0]],inKey[1:]+inVal)]", "def math_map(val, src, dst):\n return ((val - src[0]) / (src[1] - src[0])) * (dst[1] - dst[0]) + dst[0]", "def mlp_input_mapper(data, window):\n # get factor to scale data by\n mult = 1 / (window[1] - window[0])\n # return scaled data with minimum value offset\n return mult * (data - window[0])", "def _expand(l,p,d=None):\r\n\t\t\r\n\t\t# number of terms\r\n\t\tt = len(l)\r\n\t\t\r\n\t\t# make power positive\r\n\t\tp = abs(int(p))\r\n\t\t\r\n\t\t# return 1 if power is zero:\r\n\t\tif p == 0:\r\n\t\t\t\r\n\t\t\treturn [{}],d\r\n\t\t\t\r\n\t\t# or return zero if length is zero\r\n\t\tif t < 1:\r\n\t\t\t\r\n\t\t\treturn [],d\r\n\t\t\r\n\t\t# initiate dictionary\r\n\t\tif d is None:\r\n\t\t\t\r\n\t\t\t# begin with tuple of zeroes, i.e., all terms to the power of zero\r\n\t\t\to = (0,) * t\r\n\t\t\td = {o: Te(1)}\r\n\t\t\r\n\t\t# build matrix of each term\r\n\t\tm = []\r\n\t\tfor i in range(t):\r\n\t\t\t\r\n\t\t\t# taken to each power\r\n\t\t\tr = []\r\n\t\t\tfor j in range(p + 1):\r\n\t\t\t\t\r\n\t\t\t\t# make tuple\r\n\t\t\t\to = [0] * t\r\n\t\t\t\to[i] = j\r\n\t\t\t\to = tuple(o)\r\n\t\t\t\t\r\n\t\t\t\t# try to find in dictionary\r\n\t\t\t\ttry:\r\n\t\t\t\t\tx = d[o]\r\n\t\t\t\t\r\n\t\t\t\t# otherwise calculate and put in dictionary\r\n\t\t\t\texcept:\r\n\t\t\t\t\tx = l[i].power(j)\r\n\t\t\t\t\td[o] = x\r\n\t\t\t\t\r\n\t\t\t\t# put in matrix\r\n\t\t\t\tr.append(x)\r\n\t\t\t\t\r\n\t\t\t# add row\r\n\t\t\tm.append(r)\r\n\t\t\t\r\n\t\t# get permutations\r\n\t\tu = Li._distribute(p,t)\r\n\t\t\r\n\t\t# multinomial corfficient is p! / k1! k2! k3! 
...\r\n\t\t# compute leading factorial\r\n\t\tf = Te.factorials\r\n\t\ttry:\r\n\t\t\ta = f[p]\r\n\t\texcept:\r\n\t\t\ta = fac(p)\r\n\t\t\t\r\n\t\t# compute terms\r\n\t\tw = []\r\n\t\tfor i in u:\r\n\t\t\t\r\n\t\t\t# compute multinomial\r\n\t\t\tb = 1\r\n\t\t\tc = []\r\n\t\t\tfor n,j in enumerate(i):\r\n\t\t\t\t\r\n\t\t\t\t# look for factorial or compute\r\n\t\t\t\ttry:\r\n\t\t\t\t\tb *= f[j]\r\n\t\t\t\texcept:\r\n\t\t\t\t\tb *= fac(j)\r\n\t\t\t\t\t\r\n\t\t\t\t# append term\r\n\t\t\t\tc.append(m[n][j])\r\n\t\t\t\r\n\t\t\t# multiply out term\r\n\t\t\tw.append(Te(a,b,*c))\r\n\t\t\r\n\t\treturn w,d", "def map(iteratee, *seqs):\n return _map(fnc.iteratee(iteratee), *seqs)", "def mlp_output_mapper(data, window):\n # get factor to scale data by\n mult = window[1] - window[0]\n # return scaled data offset by minimum value\n return mult * data + window[0]", "def mapData(form, data, fromKeyFunc, toKeyFunc):\n rv = {}\n\n def visitItem(node):\n fromName = fromKeyFunc(node)\n toName = toKeyFunc(node)\n\n value = data.get(fromName, MISSING)\n if value != MISSING:\n rv[toName] = value\n\n def visit(node):\n if hasattr(node, 'items'):\n for item in node.items:\n visit(item)\n else:\n visitItem(node)\n\n visit(form)\n return rv", "def generalize_values(data: np.ndarray, qi_indices: np.ndarray, levels: np.ndarray):\n if np.isscalar(qi_indices):\n if qi_indices in range(0, 2):\n data = generalize_string(data, int(qi_indices), int(levels))\n if qi_indices in range(2, 5):\n data = generalize_data(data, int(qi_indices), int(levels))\n elif qi_indices.shape[0] > 1:\n for j in range(qi_indices.shape[0]):\n if qi_indices[j] in range(0, 2):\n data = generalize_string(data, qi_indices[j], levels[j])\n if qi_indices[j] in range(2, 5):\n data = generalize_data(data, qi_indices[j], int(levels[j]))\n\n return data", "def _mappings(self, inputs):\n return self.mapbias + tensor.dot(\n self._factorsX(inputs) * self._factorsY(inputs), self.whf_in.T)", "def map(self, func):\n return _(map(func, self._))", "def _MapValues(geol, arr):\n # TODO: check that geol table contains all indexs found in arr\n # Return the mapped table\n geol.set_index(geol.keys()[0])\n return geol[geol.keys()[1::]].iloc[arr]", "def exp_map(cls, q, eta):\n return q * Quaternion.exp(eta)", "def transform_map(fixed, mutable):\n \n # I have decided I want to write this function in np rather than TF.\n fixed = np.array(fixed)\n mutable = np.array(mutable)\n\n # check that the shape of each is exactly the same\n if fixed.shape != mutable.shape:\n raise ValueError(\"transform_map: both inputs must have exactly the same shape.\")\n \n # fixed and mutable should have shape (n, d). Generate an nxn matrix that holds the \n # distance between each point. distance[n, m] should be the distance between fixed[n] and\n # mutable [m]\n \n point_count = fixed.shape[0]\n index_grid = np.mgrid[0:point_count, 0:point_count]\n\n diff = fixed[index_grid[0]] - mutable[index_grid[1]]\n distance = np.linalg.norm(diff, axis=2)\n\n # use the distance matrix as a cost matrix and use the Hungarian method implemented by \n # scipy to solve the problem\n fixed_indices, mutable_indices = linear_sum_assignment(distance)\n \n # we want to rearrange mutable, but keep the order of fixed. So we want to re-order \n # mutable by the set of mutable_indices generated by the above function call, which \n # themselves are ordered by fixed_indices. During testing, I found that fixed_indices \n # maintained its original order, but I don't want to rely on that. 
Not sure if this is \n # correct...\n \n return mutable[mutable_indices[fixed_indices]]", "def map_value(self) -> global___Expression.MapValue:", "def unmapping(array, map, N):\r\n n = map.shape[0] \r\n if (array.shape[0] / 2) == N**2 :\r\n arrayout = np.zeros((N,N),dtype=np.complex128)\r\n for ii in range(n):\r\n i = map[ii,0]\r\n j = map[ii,1]\r\n arrayout[i,j] = array[ii] + 1.0J * array[ii+n]\r\n elif array.shape[0] == N**2 :\r\n arrayout = np.zeros((N,N),dtype=array.dtype)\r\n for ii in range(n):\r\n i = map[ii,0]\r\n j = map[ii,1]\r\n arrayout[i,j] = array[ii] \r\n return arrayout", "def applyMapping(self):\n pass", "def map(self, function):\n return FunctionalWrapper(map(function, self.data))", "def matrix_multiply_mapper(m, element):\n\n name, i, j, value = element\n\n if name == \"A\":\n for k in range(m):\n yield ((i, k), (j, value))\n \n else:\n for k in range(m):\n yield((k, j), (i, value))", "def fastMap(mapper, data):\n i = 0\n ans = []\n while i < len(data):\n with Pool(MAX_POOL_SIZE) as pool:\n ans.extend(pool.map(mapper, data[i:i+MAX_POOL_SIZE]))\n i += MAX_POOL_SIZE\n\n return ans", "def exp_map(b, p):\n \"\"\"\n EXP_MAP The exponential map for n-spheres\n b is the base point (vector in R^n), norm(b)=1\n p is a point on the tangent plane to the hypersphere at b (also a vector in R^n)\n\n method can be 0 or 1:\n 0: hypersphere (e.g. quaternions)\n 1: dual quaternion\n \"\"\"\n if np.allclose(b, p):\n x = b\n else:\n theta = np.linalg.norm(b - p)\n dminusbx = np.sqrt(2 - 2. * np.cos(np.pi - theta))\n l = 2. * np.sin(theta / 2)\n alpha = np.arccos((4 + dminusbx ** 2 - l ** 2) / (4 * dminusbx))\n dpb = 2. * np.tan(alpha)\n v = b + ((p - b) / np.linalg.norm(p - b)) * dpb\n x = ((v + b) / np.linalg.norm(v + b)) * dminusbx - b\n\n return x", "def map(self, function):\n pass", "def mapfn(k, v):\n for row in v:\n # completar\n pass", "def mapped(val, in_min, in_max, out_min, out_max):\n return (val - in_min) * (out_max - out_min) / \\\n (in_max - in_min) + out_min", "def map(value, in_low, in_high, out_low, out_high):\n # example from /animation_nodes/nodes/number/map_range.py\n # if inMin == inMax:\n # newValue = 0\n # # with clamping\n # if inMin < inMax:\n # _value = min(max(value, inMin), inMax)\n # else:\n # _value = min(max(value, inMax), inMin)\n # with interpolation\n # newValue = outMin + interpolation(\n # (_value - inMin) / (inMax - inMin)\n # ) * (outMax - outMin)\n # without interpolation\n # newValue = outMin + (\n # (_value - inMin) / (inMax - inMin)\n # ) * (outMax - outMin)\n # # without clamping\n # newValue = outMin + (\n # (value - inMin) / (inMax - inMin)\n # ) * (outMax - outMin)\n # # without clamping - reworded\n # result = (\n # (\n # ((value - in_low) / (in_high - in_low)) *\n # (out_high - out_low)\n # ) + out_low\n # )\n\n result = None\n\n # based on http://arduino.cc/en/Reference/Map\n # and http://stackoverflow.com/a/5650012/574981\n result = (\n (\n ((value - in_low) * (out_high - out_low)) /\n (in_high - in_low)\n ) + out_low\n )\n\n return result", "def map(self, datasetType, dataId):\n\n func = getattr(self, 'map_' + datasetType)\n return func(dataId)", "def mapfn(k, v):\n for row in v:\n # rellenar el codigo\n pass", "def map_reduce(data, emitfunc=lambda rec: [(rec,)], reducefunc=lambda v: v):\n mapped = collections.defaultdict(list)\n for rec in data:\n for emission in emitfunc(rec):\n try:\n k, v = emission\n except ValueError:\n k, v = emission[0], None\n mapped[k].append(v)\n return dict((k, reducefunc(v)) for k, v in mapped.iteritems())", 
"def _do_mapping(self):\n pass", "def process_seq(seq, mapping):\n return [mapping[w] for w in seq]", "def _map_fn(self):\n raise NotImplementedError", "def expmap(self, v, x, c):\n c = self.truncate_c(c)\n v_norm = self.clip(tf.norm(v, ord=2, axis=-1, keepdims=True))\n second_term = TanC(self._lambda_x(x, c) * v_norm / 2.0, c) * v / v_norm\n gamma = self._mobius_add(x, second_term, c)\n return gamma", "def __call__(self, map_in: np.ndarray) -> np.ndarray:\n return hp.map2alm(maps=map_in, lmax=self.n_max, use_weights=self.use_weights, verbose=self.verbose)", "def map (a_data,a_column,a_old,a_new) :\n loc_new_data = a_data\n a_data[a_column].replace(a_old,a_new,inplace=True)", "def func(x,D,a):\n return D*x**a", "def _prob_map(self, x):\n d = self.manifold.shape[0]\n n = x.shape[1]\n maha = np.zeros(shape=(n, self.m))\n for w in range(self.m):\n mu = np.tile(self.manifold[:, w].reshape(d, 1),\n (1, n))\n maha[:, w] = np.sum((x-mu)**2, 0) / self.sigma**2\n tmp = np.exp(-0.5 * maha)\n return np.exp(-0.5 * maha)", "def product_map(xs1, xs2):\n return jax.vmap(lambda x1: jax.vmap(lambda x2: pair_product(x1, x2))(xs2))(xs1)", "def mapping(x, xp, fp):\n xmin, xmax = xp\n fmin, fmax = fp\n slope = (fmax - fmin) / (xmax - xmin)\n return (x - xmin) * slope + fmin", "def remap(self, value, from1, to1, from2, to2):\n return from2 + (value - from1) * (to2 - from2) / (to1 - from1)", "def spatial_expval(map_):\n map_ = map_ / np.sum(map_)\n x, y = np.meshgrid(np.arange(map_.shape[1]), np.arange(map_.shape[0]))\n\n return np.sum(map_ * x), np.sum(map_ * y)", "def construct_feature_mapping_approx(feature_mapping, weights):\n # here is a function that is created on the fly from the input feature\n # mapping and weights\n def prediction_function(xs):\n designmtx = np.matrix(feature_mapping(xs))\n return linear_model_predict(designmtx, weights)\n # we return the function reference (handle) itself. 
This can be used like\n # any other function\n return prediction_function", "def map(self, func, *sequences):\n return self.mapper().map(func, *sequences)", "def q_mapping(obs,ctr,scn,nq):\n \n # Calculate quantile locations to be used in the next step\n q_intrvl = 100/float(nq); qtl_locs = np.arange(0,100+q_intrvl,q_intrvl) \n\n # Calculate quantiles\n q_obs = np.percentile(obs, list(qtl_locs), axis=0)\n q_ctr = np.percentile(ctr, list(qtl_locs), axis=0) \n \n if(len(obs.shape)==1):\n # Project the data using the correction function \n return interp_extrap(scn,q_ctr,q_obs)\n \n if(len(obs.shape)==2):\n # Project the data using the correction function, separately for each location \n out = np.full(scn.shape,np.nan)\n for i in range(out.shape[1]):\n out[:,i] = interp_extrap(scn[:,i],q_ctr[:,i],q_obs[:,i])\n\n return out", "def zzX_expand(*polys):\n f = polys[0]\n\n for g in polys[1:]:\n f = zzX_mul(f, g)\n\n return f", "def multDic(dic, x):\n pass", "def matrix_map(self, bkg_reduction=True, data_correction=True):\r\n\r\n if bkg_reduction is True:\r\n if data_correction is True:\r\n data = self.df4\r\n \r\n else:\r\n data = self.df2\r\n\r\n else:\r\n if data_correction is True:\r\n data = self.df3\r\n \r\n else:\r\n data = self.df1\r\n\r\n return data", "def map_values(fun, a_dict):\n return dict((k, fun(v)) for (k, v) in a_dict.items())", "def map(self, function=lambda item: item):\n for i, row in enumerate(self):\n for j, item in enumerate(row):\n row[j] = function(item)", "def mapf( f, C ):\n return (f(x) for x in C)", "def riemann_exp_map(p, v):\n return multiply(p, exp(v))", "def sym_exp_map(cls, q, eta):\n sqrt_q = q ** 0.5\n return sqrt_q * Quaternion.exp(eta) * sqrt_q", "def map(self, obj):\n if isinstance(obj, np.ndarray) and obj.ndim >= 2 and obj.shape[0] in (2,3):\n return fn.transformCoordinates(self, obj)\n else:\n return QtGui.QMatrix4x4.map(self, obj)", "def map(self,Affine,i):\n map_x = np.zeros([self.num,self.d])\n for k in range(self.num):\n map_x[k,:] = Affine.apply(i,self.pick(k))\n Mapped = Model_Points(map_x)\n return Mapped", "def _map_input(self, data: Dict,\n mapping: Optional[Dict]) -> Dict[str, Any]:\n\n if mapping is None:\n return data.copy()\n\n def _map(data, m):\n if isinstance(m, dict):\n # m is a dict {inner_key:outer_key, ...}\n return {k_in: _map(data, k_out) for k_in, k_out in m.items()}\n if isinstance(m, (tuple, list)):\n # m is a list or tuple [outer_key1, outer_key2, ...]\n # This is the case when we collect items from the original\n # data to form a list or tuple to feed to the wrapped\n # transforms.\n return m.__class__(_map(data, e) for e in m)\n\n # allow manually mark a key to be ignored by ...\n if m is ...:\n return IgnoreKey\n\n # m is an outer_key\n if self.allow_nonexist_keys:\n return data.get(m, IgnoreKey)\n else:\n return data.get(m)\n\n collected = _map(data, mapping)\n\n # Retain unmapped items\n inputs = data.copy()\n inputs.update(collected)\n\n return inputs", "def physical_maps(x, y):\n assert x.shape == (3,) and y.shape == (3,)\n assert x.dtype == np.float64 and y.dtype == np.float64\n\n C = np.empty((21,21), dtype=np.float64)\n B = np.empty((2,2), dtype=np.float64)\n b = np.empty((2,), dtype=np.float64)\n _ap.ap_physical_maps(x, y, C, B, b)\n return (C, B, b)", "def _create_subscript_mapping():\n # Create the normal and subscript digits list.\n normal_digits = [i for i in range(10)]\n subscript_digits = [chr(0x2080 + i) for i in range(10)]\n\n # Convert the normal digits to strings.\n normal_digits = [str(i) for i in 
normal_digits]\n\n # Create a dict mapping the two.\n return DefaultDictionary(zip(normal_digits, subscript_digits))", "def _ExpandResolution(resolution_map):\n # First, clear out all non-terminal answers. (This ensures that this\n # method is idempotent.)\n for answer, probability in resolution_map.items(): # iterate on a copy\n for depth in range(1, len(answer)):\n answer_slice = answer[:depth]\n if answer_slice in resolution_map:\n del resolution_map[answer_slice]\n # Next, add each leading subpath of each answer to the resolution map:\n for answer, probability in resolution_map.items(): # iterate on a copy\n for depth in range(1, len(answer)):\n answer_slice = answer[:depth]\n resolution_map[answer_slice] = (\n resolution_map.get(answer_slice, 0.0) + probability)", "def scalar_mult(s, m):\n for row, column in enumerate(m):\n m[row,column] *= s\n\n return m", "def zzx_expand(*polys):\n f = polys[0]\n\n for g in polys[1:]:\n f = zzx_mul(f, g)\n\n return f", "def map_values(function, dictionary):\n return {k: function(dictionary[k]) for k in dictionary}", "def map_feature(x):\n m, n = x.shape\n out = x\n\n # Add quodratic features.\n for i in range(n):\n for j in range(i, n):\n out = hstack((out, x[:, i].reshape(m, 1) * x[:, j].reshape(m, 1)))\n\n # Add cubic features.\n for i in range(n):\n for j in range(i, n):\n for k in range(j, n):\n out = hstack(\n (out, x[:, i].reshape(m, 1) * x[:, j].reshape(m, 1) * x[:, k].reshape(m, 1)))\n return out", "def _evaluate_map(self, opa: List[int], oha: List[int], opb: List[int],\n ohb: List[int]):\n amap = numpy.zeros((self.lena(),), dtype=numpy.int64)\n bmap = numpy.zeros((self.lenb(),), dtype=numpy.int64)\n apmask = reverse_integer_index(opa)\n ahmask = reverse_integer_index(oha)\n bpmask = reverse_integer_index(opb)\n bhmask = reverse_integer_index(ohb)\n if fqe.settings.use_accelerated_code:\n count = _evaluate_map_each(amap, self._core._astr, self.lena(),\n apmask, ahmask)\n amap = amap[:count]\n count = _evaluate_map_each(bmap, self._core._bstr, self.lenb(),\n bpmask, bhmask)\n bmap = bmap[:count]\n else:\n counter = 0\n for index in range(self.lena()):\n current = int(self._core.string_alpha(index))\n if ((~current) & apmask) == 0 and (current & ahmask) == 0:\n amap[counter] = index\n counter += 1\n amap = amap[:counter]\n counter = 0\n for index in range(self.lenb()):\n current = int(self._core.string_beta(index))\n if ((~current) & bpmask) == 0 and (current & bhmask) == 0:\n bmap[counter] = index\n counter += 1\n bmap = bmap[:counter]\n return amap, bmap", "def prograde(self, fac):\n ret = rmap(\n self.nx * fac, self.dx / fac, ny=self.ny * fac, dy=self.dy / fac)\n\n for i in xrange(0, fac):\n for j in xrange(0, fac):\n ret.map[i::fac, j::fac] = self.map\n\n return ret", "def linearize(self, params, unknowns, resids):\n\n x = hash(params['x'])\n y = params['y']\n J = {}\n\n J['f_xy', 'x'] = 2.0*x - 6.0 + y\n J['f_xy', 'y'] = 2.0*y + 8.0 + x\n return J", "def mapping(s, t, s_new, k,c):\n n, s_dim = s.shape\n t_dim = t.shape[1]\n n_new = s_new.shape[0]\n # 1. determine nearest neighbors\n dist = np.sum((s[np.newaxis] - s_new[:,np.newaxis])**2,-1)\n nn_ids = np.argsort(dist)[:,:k] # change to [:,:k]\n nns = np.row_stack([s[nn_ids[:,ki]] for ki in range(k)])\n nns = nns.reshape((n_new, k, s_dim), order='F')\n # 2 determine gram matris; \n dif = s_new[:,np.newaxis] - nns\n G = np.tensordot(dif,dif,axes=([2],[2]))\n G = G[np.arange(n_new),:,np.arange(n_new)]\n # 3. 
determine weights not worth vectorizing this \n weights = np.zeros((n_new, k))\n for i_n in range(n_new): \n weights[i_n] = np.linalg.inv(G[i_n]+c*np.eye(k)).dot(np.ones((k,)))\n weights /= np.sum(weights, -1, keepdims=True)\n # 4. compute coordinates\n t_nns = np.row_stack([t[nn_ids[:,ki]] for ki in range(k)])\n t_nns = t_nns.reshape((n_new,k, t_dim), order='F')\n t_new = np.dot(weights, t_nns)\n t_new = t_new[np.arange(n_new), np.arange(n_new)]\n return t_new", "def map_values_c(fun):\n return partial(map_values, fun)", "def mapping_for_switch(mapping):\n return {key[0]: value for key, value in mapping.items()}", "def lookup_transform(self, data, grid=None, method=np.mean, lut=None,\n return_lut=False):\n\n # Input checks\n if grid is None:\n grid = check_crs(data) # xarray\n if not isinstance(grid, Grid):\n raise ValueError('grid should be a Grid instance')\n if hasattr(data, 'values'):\n data = data.values # xarray\n\n # dimensional check\n in_shape = data.shape\n ndims = len(in_shape)\n if (ndims < 2) or (ndims > 4):\n raise ValueError('data dimension not accepted')\n if (in_shape[-1] != grid.nx) or (in_shape[-2] != grid.ny):\n raise ValueError('data dimension not compatible')\n\n if lut is None:\n lut = self.grid_lookup(grid)\n\n # Prepare the output\n out_shape = list(in_shape)\n out_shape[-2:] = [self.ny, self.nx]\n\n if data.dtype.kind == 'i':\n out_data = np.zeros(out_shape, dtype=float) * np.NaN\n else:\n out_data = np.zeros(out_shape, dtype=data.dtype) * np.NaN\n\n def _2d_trafo(ind, outd):\n for ji, l in lut.items():\n outd[ji] = method(ind[l[:, 0], l[:, 1]])\n return outd\n\n if ndims == 2:\n _2d_trafo(data, out_data)\n if ndims == 3:\n for dimi, cdata in enumerate(data):\n out_data[dimi, ...] = _2d_trafo(cdata, out_data[dimi, ...])\n if ndims == 4:\n for dimj, cdata in enumerate(data):\n for dimi, ccdata in enumerate(cdata):\n tmp = _2d_trafo(ccdata, out_data[dimj, dimi, ...])\n out_data[dimj, dimi, ...] 
= tmp\n\n # prepare output\n if method is len:\n out_data[~np.isfinite(out_data)] = 0\n out_data = out_data.astype(int)\n else:\n out_data = np.ma.masked_invalid(out_data)\n\n if return_lut:\n return out_data, lut\n else:\n return out_data", "def scale4(a,c):\n return [a[0]*c,a[1]*c,a[2]*c,a[3]*c]", "def mapVal(inputPos, in_min, in_max, out_min, out_max):\n \n \n scale = ((out_max - out_min) / (in_max - in_min))\n return float(((inputPos - in_min) * scale) + out_min)", "def _transform_map_data(self):\n WARD_FMT = '%s-%s'\n self.map_data_trans = []\n lookup = {i.column: ''.join(filter(lambda x: x.isdigit(), i.value)) for i in self.sht[1]}\n\n #skip over header\n rs = iter(self.sht.rows)\n next(rs)\n next(rs)\n for r in rs:\n pka = r[0].value\n for c in r[1:]:\n if c.value is None:\n c.value = 0\n\n self.map_data_trans.append((WARD_FMT%(pka, lookup[c.column]), c.value))", "def remap(indices, mapping):\n values = []\n for i in range(0,len(indices)):\n values.append(mapping[indices[i]])\n return values", "def _get_freq_map(min_freq, max_freq, num_freq, dtype=torch.float32):\n if num_freq > 1:\n step = float(max_freq - min_freq) / (num_freq - 1)\n map = torch.arange(start=min_freq,\n end=max_freq + step,\n step=step,\n dtype=dtype)\n return torch.reshape(map, (1, 1, -1, 1))\n elif num_freq == 1:\n return torch.tensor([float(max_freq + min_freq) / 2]).view([1, 1, -1, 1])\n else:\n raise ValueError('num_freq should be positive but we got: {}'.format(num_freq))", "def map_reduce(data, mapper, reducer=None):\n grouped = LazyAggregator()\n reduced = LazyAggregator() if reducer else None\n\n def do_map():\n for item in data:\n yield mapper(item)\n\n def do_reduce():\n # user `iteritems` to grab a fresh iterator\n for key, group in grouped.iteritems():\n yield key, reducer(group)\n\n M = do_map()\n R = do_reduce() if reducer else None\n\n for x in M:\n if x:\n key, value = x\n grouped.append(key, value)\n if R:\n reduced.append(*next(R))\n\n return reduced or grouped", "def scale(inp, ab):\n\n return inp * ab[0] + ab[1]\n # pass", "def scale(inp, ab):\n\n return inp * ab[0] + ab[1]", "def simple_map_2(f, l):\n # Same as above without comprehension:\n mapped_l = []\n for item in l:\n mapped_l.append( f(item) ) # the extra blanks are just for readability\n return mapped_l", "def mapFeaturePlot(x1, x2, degree):\n out = np.ones(1)\n for i in range(1, degree+1):\n for j in range(i+1):\n terms = (x1**(i-j)) * (x2**j)\n out = np.hstack((out, terms))\n\n return out", "def fixed_point_multiply(data, multiplier, shift):\n return _make.fixed_point_multiply(data, multiplier, shift)" ]
[ "0.613614", "0.575563", "0.57504135", "0.57287925", "0.5712245", "0.5601982", "0.55600595", "0.55141425", "0.5502613", "0.54546046", "0.54486644", "0.54435486", "0.5433881", "0.54220146", "0.53886235", "0.53791046", "0.5376159", "0.53648996", "0.5351457", "0.53402036", "0.53352", "0.53312755", "0.532754", "0.52893764", "0.5284848", "0.52720875", "0.52546436", "0.5243955", "0.5231604", "0.5205179", "0.52002954", "0.51973486", "0.51920253", "0.5191187", "0.51900464", "0.51883924", "0.51866055", "0.51780427", "0.517418", "0.5171452", "0.5164852", "0.51469034", "0.5142018", "0.51296014", "0.5122216", "0.50877565", "0.50737685", "0.5051714", "0.50503", "0.5026564", "0.5016968", "0.50126714", "0.5006294", "0.49838737", "0.49793547", "0.49786186", "0.49777353", "0.49709696", "0.49676555", "0.49599284", "0.4953989", "0.49420685", "0.4927973", "0.49274415", "0.49250582", "0.49238613", "0.490798", "0.4907001", "0.49056908", "0.48951626", "0.4894842", "0.48800918", "0.48762694", "0.48687473", "0.486327", "0.48606455", "0.48600572", "0.48551592", "0.48493657", "0.48443592", "0.4837559", "0.4835144", "0.48338017", "0.4831216", "0.48190525", "0.4814939", "0.48137292", "0.48098272", "0.47981772", "0.47838733", "0.4781596", "0.47702256", "0.47685024", "0.47684976", "0.47571126", "0.47554076", "0.4748335", "0.4739709", "0.4734974", "0.47348422", "0.47346845" ]
0.0
-1
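The `getExpansion` document above is only a stub (`pass`); a minimal NumPy sketch of the quadratic expansion its query describes (the two-column input and the output column order are assumptions) could look like:

```python
import numpy as np

def quadratic_expansion(data: np.ndarray) -> np.ndarray:
    """Expand each row (a, b) to (a, b, a^2, b^2, ab) — sketch only."""
    a, b = data[:, 0], data[:, 1]
    return np.column_stack([a, b, a * a, b * b, a * b])

rows = np.array([[1, 2], [3, 4]])
print(quadratic_expansion(rows))
# [[ 1  2  1  4  2]
#  [ 3  4  9 16 12]]
```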
Convenience method which overrides the __call__ method to invoke the getExpansion function.
def __call__(self, data): return self.getExpansion(data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getExpansion(self, data):\n pass", "def expansion_method(self, expansion_method):\n\n self._expansion_method = expansion_method", "def expand(self) -> List[TOKEN]:\n return [self.function, *self.args]", "def get_expansion(block, expansion=None):\n if isinstance(expansion, int):\n assert expansion > 0\n elif expansion is None:\n if hasattr(block, 'expansion'):\n expansion = block.expansion\n elif issubclass(block, ViPNAS_Bottleneck):\n expansion = 1\n else:\n raise TypeError(f'expansion is not specified for {block.__name__}')\n else:\n raise TypeError('expansion must be an integer or None')\n return expansion", "def call(self):\n self.call() # Call a function", "def __call__(self):\n return self.fn()", "def __call__(self, *args, **kwargs):\n return self.call(*args, **kwargs)", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def run_whatis(self, expanded, unexpanded) :\n\t\treturn self.run_man(expanded, unexpanded)", "def call(self):", "def run_call(self, expanded, unexpanded) : \n\t\tif not expanded :\n\t\t\treturn self.errormessage(\"Needs an object id to call\")\n\n\t\t# Michel@DC: you should factor the object out of this eval and\n\t\t# validate it with\n\t\t# SecurityManager.checkPermission('View', object).\n\t\t# Also, 'eval' without an namespace qualifying 'in'\n\t\t# clause can be bad! Try and do this without eval.\n\n\t\t# Jerome: Don't know how without eval !\n\t\t# new code looks very ugly and accessing to object's\n\t\t# properties doesn't work anymore, unfortunately.\n\n\t\tobjectstr = string.join(unexpanded, ' ')\n\t\tpos = string.find(objectstr, '(')\n\t\tif pos == -1 :\n\t\t\t# called without arguments\n\t\t\tobjpath = objectstr\n\t\t\tobjargs = \"\"\n\t\telse :\n\t\t\t# called with arguments, skip them\n\t\t\t# because we only want the object name\n\t\t\tobjpath = objectstr[:pos]\n\t\t\tobjargs = objectstr[pos:]\n\n\t\tobjpath = string.replace(objpath, '.', '/')\n\t\tobject = self.toObject(self.__context, objpath)\n\t\tif object is None :\n\t\t\t# maybe should do something to re-allow properties to be used\n\t\t\treturn self.errormessage(\"Object %s not found\" % objectstr)\n\t\telse :\n\t\t\tif not self.HasPerms(object, 'View') :\n\t\t\t\treturn -1\n\t\t\telse :\n\t\t\t\t_ = context = self.__context\n\t\t\t\tcallresult = str(eval(\"object%s\" % objargs))\n\t\t\t\tself.printf(\"%s\" % callresult)\n\t\t\t\tself.htmlmessage(callresult, safe=1)", "def get_expansion(block, expansion=None):\n if isinstance(expansion, int):\n assert expansion > 0\n elif expansion is None:\n if hasattr(block, 'expansion'):\n expansion = block.expansion\n elif issubclass(block, BasicBlock):\n expansion = 1\n elif issubclass(block, Bottleneck):\n expansion = 4\n else:\n raise TypeError(f'expansion is not specified for {block.__name__}')\n else:\n raise TypeError('expansion must be an integer or None')\n return expansion", "def test_get_systems_expanded(self):\n pass", "def call(self) -> global___Snippet.ClientCall:", "def call(self) -> global___Snippet.ClientCall:", "def __call__(self):\n context = Context()\n return self.recipe.execute(context, self.cmd, self.cmd_args)", "def expand(self, element:Element, context:ExpansionContext):\n\n raise NotImplementedError()", "def __call__(self, *args, **kw):\n return self.transform(term.__call__(self, *args, **kw))", "def test_call_wrapped_function(self):\r\n\r\n # adapted from test_app.test_cd_hit.test_cdhit_clusters_from_seqs\r\n\r\n exp = {'0': 'R27DLI_4812',\r\n '1': 'U1PLI_7889',\r\n '2': 
'W3Cecum_4858',\r\n '3': 'R27DLI_3243',\r\n }\r\n app = GenericRepSetPicker(params={'Algorithm': 'most_abundant',\r\n 'ChoiceF': make_most_abundant, 'ChoiceFRequiresSeqs': True})\r\n obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath)\r\n self.assertEqual(obs, exp)", "def run_help(self, expanded, unexpanded) :\n\t\treturn self.run_man(expanded, unexpanded)", "def __call__(self, *arg, **kwargs):\n return self._fun(*arg, **kwargs)", "def call(self, *args, **kwargs):", "def get_expansion(self, prec, padic_num):\n padic_expansion = list(padic_num.expansion())\n if isinstance(padic_expansion[0], list):\n return padic_expansion\n else:\n # Eistenstein extension case.\n padic_list = []\n for i in range(0, len(padic_expansion), 2):\n term = [padic_expansion[i]]\n padic_list.append(term)\n\n # Fill the rest of the list to the sufficient precision.\n for i in range(prec - len(padic_list)):\n padic_list.append([]) \n return padic_list", "def __call__(self, *args, **kwargs):\n return self._func(*args, **kwargs)", "def call(self, **kwargs):\n return getattr(self.resource, self.function)(**kwargs)", "def display(self):\n from sage.tensor.modules.format_utilities import FormattedExpansion\n from sage.misc.latex import latex\n resu_txt = str(self.parent()._chart[:]) + ' |--> ' + \\\n str(ExpressionNice(self._express))\n resu_latex = latex(self.parent()._chart[:]) + r' \\mapsto' + \\\n latex(ExpressionNice(self._express))\n return FormattedExpansion(resu_txt, resu_latex)", "def __call__(self, *args, **kwargs):\n return self.call(*args, **kwargs)", "def __call__(self, *args, **kwargs):\n return function(args, addtional_info)", "def __getattr__(self, name):\n def func(*args, **kwargs):\n # Python gives arguments as a tuple, convert them to list.\n f = getattr(self.obj, name)\n if not callable(f):\n return f\n\n # Print the function call as it would be written in code.\n a = ', '.join([str(x) for x in args])\n kw = dict_to_parameters(kwargs)\n if len(kw):\n kw = ', ' + kw\n print('Calling \\'%s(%s%s)\\'..' 
% (name, a, kw))\n\n if self.prompt:\n raw_input(self.guide_text)\n\n response = None\n if not self.emulate:\n response = f(*args, **kwargs)\n print '->', response\n\n return response\n\n return func", "def expand_call(kargs):\n func = kargs['func']\n del kargs['func']\n out = func(**kargs)\n return out", "def expand(self):\n data, end = \\\n self.pat.traverse(lambda obj, *args: args,\n self.begin, self.data)\n return data", "def __call__(self, *args, **kwargs):\n return self.func(*args, **kwargs)", "def __call__(self, *args, **kwargs):\n return self.func(*args, **kwargs)", "def test_expand_func(self):\n self.assertEqual([\"test\", [\"a1\", \"\\\"a b\\\"\", \"f(w,x)\"]],\n grammar._EXPAND_FUNC.parseString(\"$test(a1, \\\"a b\\\", f(w,x))\").asList())", "def __call__(fun_name):", "def expand(self, *args, **kwargs):\n\t\tif hasattr(self.parent, \"queriedTable\"):\n\t\t\treturn self.parent.queriedTable.expand(*args, **kwargs)\n\t\telse:\n\t\t\treturn self.parent.rd.expand(*args, **kwargs)", "def __create_expansion(term: ast.FunctionApplicationASTNode,\n definition: ast.DefineFunCommandASTNode) -> ast.TermASTNode:\n assert term.get_declaration().get_name() == definition.get_fun_name()\n parm_names = [x for x, _ in definition.get_formal_parameters()]\n defining_term = definition.get_child_nodes()[0]\n if len(parm_names) > 0:\n arg_terms = term.get_child_nodes()\n parm_bindings = zip(parm_names, arg_terms)\n assert isinstance(defining_term, ast.TermASTNode)\n result = ast.LetTermASTNode(list(parm_bindings), defining_term)\n defining_term_clone = defining_term.clone({definition: result}, dict())\n result.set_enclosed_term(defining_term_clone)\n return result\n else:\n return defining_term.clone(dict(), dict())", "def run_apropos(self, expanded, unexpanded) :\n\t\treturn self.run_man(expanded, unexpanded)", "def _call(self, x):\n return x.inner(x)", "def _call_impl(self, t):\n return self.interp(t)", "def _access(self, wanted, opt, value, fullname=_is_macroarg):\n result = (\n wanted[opt]._access(value),\n fullname,\n wanted[opt]\n )\n del wanted[opt]\n return result", "def __call__(value):", "def __getitem__(self,cle):\n return self.F(*cle)", "def __call(self, **kwargs):\n return self.__call_api(kwargs)", "def expand(self, file_name):\n # Create a program file to store the data and code from the test case\n # files.\n program = self.raw_file_reader.split_data_and_code(file_name)\n # Process the program object to calculate the data locations and\n # expand the programs given.\n program = self.process_program_object(program)\n # Create the memory contents from the expanded program object.\n return self.create_memory_contents(program)", "def __call__(self, *args, **kwargs):\n return self.method(*args, **kwargs)", "def __call__(self, *args, **kwargs):\n return self.f(*args, **kwargs)", "def __call__(object):", "def register_expansion(library_node: LibraryNode, expansion_name: str):\n def expander(exp: ExpandTransformation):\n result = expansion(exp)\n library_node.register_implementation(expansion_name, exp)\n return result\n\n return expander", "def __call__(self):\n return self.referee()", "def _call(self, args):\n a = args.split(' ', 1)\n if a:\n getattr(self, a[0])(*a[1:])", "def static_call(self, *args):\n return self.expression", "def __call__(self) -> Operation:\n if self.action:\n logger.debug(self.long_description)\n self.action(*self.args, **self.kwargs) # type: ignore\n return self", "def _generateExpandableState(self, obj, **args):\n result = []\n if not args.get('mode', None):\n 
args['mode'] = self._mode\n args['stringType'] = 'expansion'\n indicators = self._script.formatting.getString(**args)\n state = obj.getState()\n if state.contains(pyatspi.STATE_EXPANDABLE):\n if state.contains(pyatspi.STATE_EXPANDED):\n result.append(indicators[1])\n else:\n result.append(indicators[0])\n return result", "def __call__(self, *args, **kw):\n raise NotImplementedError(\"failed monkey-patch: material stacker needs\"\n \" to replace __call__ in Scatterer\")", "def __getattr__(self, key):\n if key in self.cmdmanager:\n action = self.cmdmanager[key]\n def call(*args):\n try:\n action(self, *args)\n except:\n traceback.print_exc()\n return call\n try:\n return self.__getattr__(key)\n except:\n return self.__getattribute__(key)", "def work(self):\n\n cmd = self.options.command\n cmdargs = self.options.args\n\n # find function\n fname = \"cmd_\" + cmd.replace('-', '_')\n if not hasattr(self, fname):\n self.log.error('bad subcommand, see --help for usage')\n sys.exit(1)\n fn = getattr(self, fname)\n\n b = inspect.signature(fn).bind(*cmdargs)\n\n fn(*b.args, **b.kwargs)", "def func(self):\n from evennia.utils.utils import string_suggestions, list_to_string\n\n msg = \"Command '%s' is not available.\" % self.raw\n cmdset = self.cmdset\n cmdset.make_unique(self.caller)\n all_cmds = [cmd for cmd in cmdset if cmd.auto_help and cmd.access(self.caller)]\n names = []\n for cmd in all_cmds:\n # noinspection PyProtectedMember\n names.extend(cmd._keyaliases)\n suggestions = string_suggestions(self.raw, set(names), cutoff=0.7)\n if suggestions:\n msg += \" Maybe you meant %s?\" % list_to_string(\n suggestions, \"or\", addquote=True\n )\n else:\n msg += ' Type \"help\" for help.'\n self.msg(msg)", "def run(self):\n method = self.getMethod()\n fce = getattr(self, method)\n return fce(*self.argv[1:])", "def __call__(self, *args):\n\n func_env = Environment(self.parent)\n self.define_args(func_env, *args)\n return evaluate(self.body, func_env)", "def __getattr__(self, method_name):\n return partial(self.exec, method_name.replace(\"_\", \" \"))", "def _calc_interaction_expansion(self):\n # preevaluate expansions for volume and surface phase functions\n # this returns symbolic code to be then further used\n\n volexp = self.V.legexpansion(self.t_0, self.t_ex,\n self.p_0, self.p_ex,\n self.geometry).doit()\n\n brdfexp = self.SRF.legexpansion(self.t_0, self.t_ex,\n self.p_0, self.p_ex,\n self.geometry).doit()\n\n # preparation of the product of p*BRDF for coefficient retrieval\n # this is the eq.23. and would need to be integrated from 0 to 2pi\n fPoly = expand(2 * sp.pi * volexp * brdfexp)\n\n # do integration of eq. 23\n expr = self._integrate_0_2pi_phis(fPoly)\n\n # now we do still simplify the expression to be able to express\n # things as power series of cos(theta_s)\n theta_s = sp.Symbol('theta_s')\n replacements = [(sp.sin(theta_s) ** i,\n expand((1. 
- sp.cos(theta_s) ** 2)\n ** sp.Rational(i, 2)))\n for i in range(1, self.SRF.ncoefs + self.V.ncoefs - 1)\n if i % 2 == 0]\n\n res = expand(expr.xreplace(dict(replacements)))\n\n return res", "def test_system_expansion():\n # We need 2 combinatorial systems\n template_script = get_template_script()\n template_system = template_script['systems']['implicit-system']\n del template_system['leap']\n template_script['systems'] = {'system1': template_system.copy(),\n 'system2': template_system.copy()}\n template_script['systems']['system1']['receptor'] = utils.CombinatorialLeaf(['Abl', 'T4Lysozyme'])\n template_script['systems']['system2']['ligand'] = utils.CombinatorialLeaf(['p-xylene', 'toluene'])\n template_script['experiments']['system'] = utils.CombinatorialLeaf(['system1', 'system2'])\n\n # Expected expanded script\n expected_script = yank_load(\"\"\"\n systems:\n system1_Abl: {receptor: Abl, ligand: p-xylene, solvent: GBSA-OBC2}\n system1_T4Lysozyme: {receptor: T4Lysozyme, ligand: p-xylene, solvent: GBSA-OBC2}\n system2_pxylene: {receptor: T4Lysozyme, ligand: p-xylene, solvent: GBSA-OBC2}\n system2_toluene: {receptor: T4Lysozyme, ligand: toluene, solvent: GBSA-OBC2}\n experiments:\n system: !Combinatorial ['system1_Abl', 'system1_T4Lysozyme', 'system2_pxylene', 'system2_toluene']\n protocol: absolute-binding\n \"\"\")\n expanded_script = template_script.copy()\n expanded_script['systems'] = expected_script['systems']\n expanded_script['experiments'] = expected_script['experiments']\n\n assert ExperimentBuilder(template_script)._expand_systems(template_script) == expanded_script", "def expand(self, *vars):\n arg0 = self.args[0]\n if (not vars) or arg0.has(*vars):\n op = type(self)\n arg0 = arg0.expand(*vars)\n if isinstance(arg0, Add):\n arg0a, arg0b = arg0.args\n return (op(arg0a) + op(arg0b)).expand(*vars)\n if isinstance(arg0, Multiply):\n arg0a, arg0b = arg0.args\n return (op(arg0a)*arg0b + arg0a*op(arg0b)).expand(*vars)\n return self", "def __call__(self):", "def __call__(self):", "def __getattr__ (self, name) :\n\t\treturn functools.partial( self.run, name )", "def expansion(self, niters=-1):\n _cgco.gcoExpansion(self.handle, np.intc(niters), self.energyTempArray)\n return self._convertEnergyBack(self.energyTempArray[0])", "def expand_callable(self, call_expr):\n call_expr.func = ast.Attribute(value=call_expr.func, attr='__call__')", "def __call__(self, *args, **kw):\n return self.callable(*args, **kw)", "def setExpanded(self):", "def call(self):\n\n self.cross()\n self.mutation()\n self.selection()\n \n return self.population[0]", "def eval(self):\n pass", "def eval(self):\n pass", "def eval(self):\n pass", "def __call__(self, *args, **kwargs):\n dprint(2, \"StencilMetadata::__call__\", self.func.__name__, args, kwargs)\n self.compile()\n return self.sfunc(*args, **kwargs)", "def __call__(self, population, context):\n pass", "def __call__(self):\n issue_deprecation_warning('Referencing this attribute like a function',\n 'it directly', since='6.2')\n\n return self", "def __call__( self ):\n pass", "def __call__( self, *args, **kw ):\n return self.run( *args, **kw )", "def cmd(self):", "def test_expansion(checklist, index, stored, expanded):\r\n self.assertEqual(get_action_url(checklist, index), stored)\r\n expanded_checklist = expand_checklist_action_url(self.course, checklist)\r\n self.assertEqual(get_action_url(expanded_checklist, index), expanded)\r\n # Verify no side effect in the original list.\r\n self.assertEqual(get_action_url(checklist, index), stored)", "def 
get_expand(self):\n\n return self.props[\"expand\"]", "def expansion_structure(self):\n return self.cluster_subspace.expansion_structure", "def __call__(self, x):", "def __call__(self, *args, **kwargs):\n return self.get(*args, **kwargs)", "def __call__(self, *args, **kwargs):\n return self.get(*args, **kwargs)", "def __call__(self, args):", "def expansion_steps(self):\n return self._p", "def func(self):\n caller = self.caller\n\n if not self.args or not self.recipe:\n self.caller.msg(\"Usage: craft <recipe> from <ingredient>, ... [using <tool>,...]\")\n return\n\n ingredients = []\n for ingr_key in self.ingredients:\n if not ingr_key:\n continue\n obj = caller.search(ingr_key, location=self.caller)\n # since ingredients are consumed we need extra check so we don't\n # try to include characters or accounts etc.\n if not obj:\n return\n if (\n not inherits_from(obj, \"evennia.objects.models.ObjectDB\")\n or obj.sessions.all()\n or not obj.access(caller, \"craft\", default=True)\n ):\n # We don't allow to include puppeted objects nor those with the\n # 'negative' permission 'nocraft'.\n caller.msg(\n obj.attributes.get(\n \"crafting_consumable_err_msg\",\n default=f\"{obj.get_display_name(looker=caller)} can't be used for this.\",\n )\n )\n return\n ingredients.append(obj)\n\n tools = []\n for tool_key in self.tools:\n if not tool_key:\n continue\n # tools are not consumed, can also exist in the current room\n obj = caller.search(tool_key)\n if not obj:\n return None\n if not obj.access(caller, \"craft\", default=True):\n caller.msg(\n obj.attributes.get(\n \"crafting_tool_err_msg\",\n default=f\"{obj.get_display_name(looker=caller)} can't be used for this.\",\n )\n )\n return\n tools.append(obj)\n\n # perform craft and make sure result is in inventory\n # (the recipe handles all returns to caller)\n result = craft(caller, self.recipe, *(tools + ingredients))\n if result:\n for obj in result:\n obj.location = caller", "def test_calls(self):\n ex = self.ex\n m = self.m\n n = self.n\n\n nreps = random.randint(1, 10)\n ex.nreps = nreps\n ex.vary[\"X\"][\"with\"].add(\"rep\")\n ex.infer_lds()\n\n cmds = ex.generate_cmds()\n\n idx = random.randint(0, nreps - 1)\n self.assertIn([\"name\", m, n, \"X_%d\" % idx, m, \"Y\", m, \"Z\", n], cmds)", "def explore(self, *args):", "def __call__(self, *args, **kwargs):\n with self._lock:\n (concrete_function,\n filtered_flat_args) = self._maybe_define_function(args, kwargs)\n return concrete_function._call_flat(\n filtered_flat_args, captured_inputs=concrete_function.captured_inputs) # pylint: disable=protected-access", "def __call__(self, *args, **kw):\n return self.transform(Term.__call__(self, *args, **kw))", "def __getitem__(self, cmd):\n assert isinstance(cmd, str)\n name = cmd[len(self.predicate) :]\n # Check that command is valid and not private,\n # protected or special method and attribute for it exists\n if (\n cmd.startswith(self.predicate)\n and not cmd.startswith(self.predicate + \"_\")\n and hasattr(self, name)\n ):\n item = self.__getattribute__(name)\n if callable(item):\n return item\n # If command not found, return help\n return partial(self.help, fail=\"No such command\")", "def func(*args, **kwargs):\n return call(*args, **kwargs) # pylint: disable = E1102" ]
[ "0.74891585", "0.5920144", "0.54512185", "0.5370277", "0.53289384", "0.5261651", "0.5257707", "0.5253625", "0.5253625", "0.5253625", "0.5253625", "0.5253625", "0.525186", "0.5245593", "0.51983786", "0.5187937", "0.51806074", "0.51613045", "0.51613045", "0.5146855", "0.51071787", "0.50719315", "0.5056144", "0.50152665", "0.50062287", "0.50025994", "0.49969396", "0.49856463", "0.4985423", "0.49798384", "0.497648", "0.4957752", "0.49435785", "0.49413586", "0.49252543", "0.4915208", "0.4915208", "0.48917618", "0.4858511", "0.4857312", "0.48520312", "0.48458296", "0.48390657", "0.48386452", "0.48291504", "0.48263234", "0.4820251", "0.4812735", "0.48089772", "0.48032725", "0.48029038", "0.48003116", "0.47957337", "0.47893", "0.47886643", "0.47822195", "0.47821617", "0.47672448", "0.4750125", "0.47497967", "0.47340804", "0.47200298", "0.4703916", "0.4684181", "0.46529084", "0.46508348", "0.4627604", "0.46186966", "0.46178538", "0.46178538", "0.46134916", "0.46080542", "0.46008122", "0.45936492", "0.45825598", "0.4578036", "0.45739412", "0.45739412", "0.45739412", "0.45730355", "0.45724094", "0.45692846", "0.45691085", "0.45679292", "0.45672667", "0.45602497", "0.45597056", "0.455765", "0.4555349", "0.45504084", "0.45504084", "0.45477924", "0.4524181", "0.45193776", "0.45164093", "0.4513841", "0.45131332", "0.45118377", "0.4511137", "0.45103014" ]
0.775161
0
Saves a copy of the database into the tmp directory. Modify this code directly if needed, as it hardwires the username, db name and filename.
def mysqldump(): run("mysqldump -u database_user database_name -p > ~/tmp/exported_db.sql")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def backup():\n backup_shift(os, config.utils.tasks.backup_depth)\n if config.utils.tasks.secret_key is None:\n shutil.copyfile(config.core.database_name, config.core.database_name+'.1')\n else:\n data = get_encrypted_database()\n with open(config.core.database_name+'.1', 'wb') as f:\n f.write(data)", "def backup_database():\n db_path = os.path.join(config.cum_dir, 'cum.db')\n backup_path = os.path.join(config.cum_dir, 'cum.db.bak')\n copyfile(db_path, backup_path)", "def saveDbToDisk (self):\n\n currentThread=threading.currentThread()\n self._logIo(\"save-db-to-disk\").debug1(\"starting to save db for instance %s. thread-id=%d\", self._instanceName, currentThread.ident)\n\n # We dump the dict to Json format\n try:\n a.infra.format.json.writeToFile(self._logIo, self._cidLastAccessTimeDict, self._dbFileFullNamePath)\n except Exception as ex:\n self._logIo(\"error-write-db-file\").error(\"error writing db to file='%s'. exception: %s\", self._dbFileFullNamePath, ex)\n\n # We dump prediction counters for presistency\n try:\n a.infra.format.json.writeToFile(self._logIo, self.counters, self._countersFileFullNamePath)\n except Exception as ex:\n self._logIo(\"error-write-counters-file\").error(\"error writing counters to file='%s'. exception: %s\", self._countersFileFullNamePath, ex)\n \n return self._dbFileFullNamePath, self._dbFailedToRemoveFileFullNamePath", "def copy_db():\n with cd(\"/tmp\"), lcd(\"/tmp\"):\n sudo(\"pg_dump gsi > /tmp/latest.sql\", user=\"postgres\")\n run(\"tar zcvf latest.sql.tgz latest.sql\")\n get(\"/tmp/latest.sql.tgz\", \"latest.sql.tgz\")\n sudo(\"rm /tmp/latest.sql.tgz /tmp/latest.sql\")", "def saveDatabase(database,user):\n pickle.dump(user, open(\"Users/\"+user.key, \"wb\"))", "def web_backup():\n conf = config.utils\n if conf.tasks.secret_key is None:\n upload_path = config.core.database_name\n file = None\n else:\n file = tempfile.NamedTemporaryFile(delete=False)\n file.write(get_encrypted_database())\n file.close()\n upload_path = file.name\n\n factory = ftplib.FTP_TLS if conf.tls else ftplib.FTP\n # noinspection PyDeprecation\n with ftputil.FTPHost(conf.ftp.host, conf.ftp.username, conf.ftp.password,\n session_factory=factory, use_list_a_option=False) as host:\n backup_shift(host, conf.tasks.web_backup_depth)\n host.upload(upload_path, config.core.database_name+'.1')\n if file is not None:\n os.unlink(file.name)", "def save_db(self):\n logger.log('DEBUG', f'Saving results to database')\n lock.acquire()\n db = Database()\n db.create_table(self.domain)\n db.save_db(self.domain, self.results, self.source)\n db.close()\n lock.release()", "def backup_database():\n logger.info(\"start database_backup\")\n management.call_command('dbbackup', compress=True)\n logger.info(\"end database_backup\")", "def tempdb():\n fd, minitwit.app.config['DATABASE'] = tempfile.mkstemp()\n minitwit.init_db()\n try:\n yield\n finally:\n os.close(fd)\n os.unlink(minitwit.app.config['DATABASE'])", "def save_db(self) -> None:", "def save(self):\r\n debug.write(\"[SourceRPG] Handling SQL Save\", 1)\r\n if self.path != \":memory:\":\r\n debug.write(\"Path is not in memory\", 2, False)\r\n if currentTurboMode is False:\r\n debug.write(\"We are not in turbo mode\", 2, False)\r\n self.connection.commit()\r\n debug.write(\"[SourceRPG] SQL Save handled\", 1)", "def save_db(self) -> None:\n self.connection.commit()", "def backup_database(db_host=None, db_name=None, cfg='project'):\n data = __salt__['mc_project.get_configuration'](cfg)\n db = 
data['data']['django_settings']['DATABASES']['default']\n if not db_host:\n db_host = db['HOST']\n if not db_name:\n db_name = db['NAME']\n dump_filename = '/tmp/{0}-{1}.dump'.format(\n db_name,\n datetime.now().strftime('%Y-%m-%d-%H-%M'))\n script = BACKUP.format(**locals())\n script += \"exit $?\\n\"\n ret = run(host=db_host, script=script)\n if ret['retcode']:\n pprint(ret)\n raise Exception('dump failed')\n return dump_filename", "def mysql_import():\n # first make another copy of the db\n run(\"mysqldump -u database_user database_name -p > ~/tmp/exported_db_temp.sql\")\n # then import from the backup\n run(\"mysql -u database_user -p -D database_name < ~/tmp/exported_db.sql\")", "def reset_database_to_default(self):\n _src = os.path.abspath(self.default_db)\n _dst = os.path.abspath(self.db_path)\n copyfile(_src, _dst)", "def backup_database(self):\n backup_file = \"{}-{}.sql\".format(\n config.DATABASE_NAME, datetime.today().strftime(\"%Y-%m-%d--%H%M\")\n )\n backup_uri = \"{}/{}\".format(config.DATABASE_BACKUP_BUCKET, backup_file)\n step = \"Backing Up Database:\\nbackup={}\".format(backup_uri)\n try:\n self.slacker.send_thread_reply(step)\n backup_command = [\n \"gcloud\",\n \"sql\",\n \"export\",\n \"sql\",\n config.DATABASE_INSTANCE_NAME,\n backup_uri,\n \"--database={}\".format(config.DATABASE_NAME),\n \"--verbosity=debug\",\n ]\n subprocess.run(backup_command, check=True)\n except Exception as e:\n self.raise_step_error(step=step, error=e)", "def __upload_data(self):\n data_path = \"database\"\n os.makedirs(data_path, exist_ok=True)\n try:\n conn = sqlite3.connect('database/customers.db')\n query = '''CREATE TABLE IF NOT EXISTS all_customers_database (\n first_name TEXT, second_name TEXT,\n gender TEXT, account_type TEXT, account_number INTEGER PRIMARY KEY UNIQUE NOT NULL,\n account_password VARCHAR, account_balance REAL );'''\n #Create table\n cursor = conn.cursor()\n print(\"Connection sucessful\")\n cursor.execute(query)\n conn.commit()\n print(\"Table created\")\n #Insert a row to a database\n insert_query ='''INSERT INTO all_customers_database\n (first_name, second_name, gender, account_type, account_number, account_password, account_balance)\n VALUES \n (?, ?, ?, ?, ?, ?, ?);'''\n conn.execute(insert_query, (self.first_name, self.second_name, self.gender, self.account_type, self.account_number, self.account_password, self.account_balance))\n print(\"Your details saved successfully.\")\n except sqlite3.Error as err:\n # print(\"Error while creating a sqlite table \", err)\n print(\"Error creating database\")\n finally:\n if conn:\n conn.close()\n # print(\"Sqlite connection closed.\")", "def create(self, db_name):\n path = self.get_path(db_name)\n if not os.path.exists(path):\n fp = open(path, 'wb')\n fp.close()\n self.connect(db_name)\n self.make()\n else:\n self.connect(db_name)", "def create_db_from_scratch():\n if os.path.isfile('data.db'):\n os.remove('data.db')\n Base.metadata.create_all(engine)", "def save(self):\n\t\tself.CONFIG.save()\n\t\tself.temp_files.save()", "def mock_db(tmpdir_factory):\n filename = str(tmpdir_factory.mktemp(\"data\").join(\"test.db\"))\n create_test_db(filename)\n return filename", "def safeSave(self):\n self.fileInfo.makeBackup()\n filePath = os.path.join(self.fileInfo.dir,self.fileInfo.name)\n tempPath = filePath+'.tmp'\n self.save(tempPath)\n renameFile(tempPath,filePath)\n self.fileInfo.setMTime()\n self.fileInfo.extras.clear()", "def copy_db():\n local('ssh %s pg_dump -U djangoproject -c djangoproject | psql djangoproject' % 
env.hosts[0])", "def create_new_db():\n global data_base, table\n data_base = asksaveasfilename(title=\"Select file\", filetypes=((\"DATA BASE\", \"*.db\"), (\"all files\", \"*.*\")),\n defaultextension='.db')\n\n if Path(data_base).suffix == '.db':\n create_win_create_table()\n else:\n mistake_db_file()", "def createdb(dbname):\n os.system(\"createdb -w %s\" % dbname)", "def backup_database(cls):\n with open('db_backup.csv', 'w', newline='') as csvfile:\n fieldnames = ['product_name',\n 'product_price',\n 'product_quantity',\n 'date_updated']\n writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\n\n writer.writeheader()\n for product in Product.select().dicts():\n writer.writerow(\n {'product_name': product['product_name'],\n 'product_price': product['product_price'],\n 'product_quantity': product['product_quantity'],\n 'date_updated': product['date_updated']})\n print('Database backup complete! File was saved as \"db_backup.csv\".\\n')", "def export_database(self):\n base_path = QtWidgets.QFileDialog.getSaveFileName(self, 'Save File', filter='CSV (*.csv)')\n database.export_to_csv(DB_PATH, base_path[0])", "def write(self):\n db_handle = open(settings.DATA_PATH, 'wb')\n cPickle.dump(dict(self), db_handle)\n db_handle.close()", "def save(file_path = \"database.pkl\"):\n\n with open(file_path, 'wb') as f:\n pickle.dump(person_database, f, protocol=pickle.HIGHEST_PROTOCOL)\n print(\"Person database saved to \" + file_path)", "def save_database(app):\n app.database().save()\n app.status.message('Finished saving..')", "def locate_db(self):\r\n full_path = os.path.join(APP_DATA, \r\n 'Google\\\\Chrome\\\\User Data\\\\Default\\\\Login Data')\r\n temp_path = os.path.join(APP_DATA,'sqlite_file')\r\n if os.path.exists(temp_path): os.remove(temp_path)\r\n shutil.copyfile(full_path, temp_path)\r\n return full_path", "def close(self, *args, **kwargs):\n super(DatabaseWrapper, self).close(*args, **kwargs)\n\n signature_version = self.settings_dict.get(\"SIGNATURE_VERSION\", \"s3v4\")\n s3 = boto3.resource(\n 's3',\n config=botocore.client.Config(signature_version=signature_version),\n )\n\n try:\n with open(self.settings_dict['NAME'], 'rb') as f:\n fb = f.read()\n\n m = hashlib.md5()\n m.update(fb)\n if self.db_hash == m.hexdigest():\n logging.debug(\"Database unchanged, not saving to remote DB!\")\n return\n\n bytesIO = BytesIO()\n bytesIO.write(fb)\n bytesIO.seek(0)\n\n s3_object = s3.Object(self.settings_dict['BUCKET'], self.settings_dict['REMOTE_NAME'])\n result = s3_object.put('rb', Body=bytesIO)\n except Exception as e:\n logging.debug(e)\n\n logging.debug(\"Saved to remote DB!\")", "def exportDB(self):\n sourcesession=svc.connect(self.__source,accessMode=coral.access_Update)\n destsession=svc.connect(self.__dest,accessMode = coral.access_Update)\n try:\n dbcp=DBCopy(sourcesession,destsession,1024)\n if self.__all:\n dbcp.copyDB()\n elif self.__inv:\n dbcp.copyInventory()\n elif len(self.__tree) != 0:\n dbcp.copyTrees([self.__tree])\n del sourcesession\n del destsession\n except Exception, e:\n print str(e)\n del sourcesession\n del destsession", "def createDB():\n print(\"::creating db\")\n filepath = confighome+\"config\"\n\n # open config to get credentials for ssh \n with open(filepath,mode='r', encoding='utf-8') as f:\n jconfig = json.load(f)\n creds=jconfig[0]\n\n # ssh in make a directory, initialize it with 'git --bare' \n cmd=\"ssh \"+creds['db']['username']+\"@\"+creds['db']['host']\n cmd_sqrd=\" 'if ! 
cd swrss_database > /dev/null 2>&1 ; then mkdir swrss_database; cd swrss_database ; fi ; git init --bare ;'\"\n cmd_full=cmd+cmd_sqrd\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)\n if retval != 0:\n print(\"::error encountered. Make sure you have stored your remote's info in the config\")\n\n # locally clone the \"db\"\n cmd_full=\"git clone \"+creds['db']['username']+\"@\"+creds['db']['host']+\":swrss_database\"\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)", "def save(self):\n if self.hasChanged:\n filePath = self.path\n tempPath = filePath+'.tmp'\n fileDir = os.path.split(filePath)[0]\n if not os.path.exists(fileDir): os.makedirs(fileDir)\n cPickle.dump(self.data,open(tempPath,'w'))\n renameFile(tempPath,filePath,True)\n self.hasChanged = False", "def write_db(db):\n\n # Look for database in the same folder as this script\n script_dir = os.path.dirname(os.path.realpath(__file__))\n db_filepath = os.path.join(script_dir, 'cn_loads_database.dat')\n\n with open(db_filepath, 'w') as f:\n f.write(yaml.dump(db, default_flow_style=False))", "def db():\n\n db_obj = dump_db.DumpDB()\n db_obj.load_from_csv(CONF.BACKUP_DB_PATH)\n return db_obj", "def dump_DB(self):\n\t\tprint 'Dumping Data Base...'\n\t\tp=cPickle.Pickler(open(self.DB_file, 'wb'))\n\t\tp.fast=True\n\t\tp.dump(self.DB)\n\t\tprint 'Dumping completed'\n\t\t#stream.close()\n\t\t#return ", "def _dump_remote_db(c):\n env = c.config\n timestamp = datetime.datetime.now().strftime(\"%Y%m%d_%Hh%Mm%Ss\")\n dump_filename_base = \"{project_name}-{file_key}-{timestamp}.sql\"\n file_key = env.verbose_name\n dump_dir = env.db_dump_dir\n database_name = env.db_name\n file_key = \"{}-full\".format(file_key)\n\n dump_filename = dump_filename_base.format(\n project_name=env.project_name,\n file_key=file_key,\n timestamp=timestamp\n )\n\n backup_location = os.path.join(\n dump_dir, dump_filename\n )\n\n with Connection(env.hosts, user=env.user, config=c.config) as c:\n\n c.run(\n 'echo Dumping {} database...'.format(env.verbose_name)\n )\n c.run(\n 'mysqldump --defaults-file={defaults_file} '\n '{database_name} > {backup_location}'.format(\n defaults_file=env.mysql_defaults_file,\n database_name=database_name,\n backup_location=backup_location\n )\n )\n return backup_location", "def create_db(self, path: str) -> None:\n if os.path.isfile(path):\n self.db_path = path\n print(\"DB already exists\")\n return\n\n print(path)\n\n self.db_path = path\n\n print(\"Opening the base db\")\n with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'basedb.xml'), 'r') as f:\n base = f.read()\n print(\"Reading the base as {0}\".format(base))", "def task_backup_sqlite_database():\n sqlite_dir = os.path.abspath(settings.BASE_DIR)\n os.chdir(sqlite_dir)\n existing_files = glob.glob(\"*.sqlite3*\")\n backup_helper = BackupHelper()\n\n # Check how many existing backups there are.\n # If none, then throw warning\n if len(existing_files) == 0:\n logger.warning(\"Cannot perform backup as no database file currently exists\")\n return\n\n # If one, then do the backup.\n elif len(existing_files) == 1:\n backup_helper.make_backup()\n\n # If two (or more, for whatever reason), then backup and remove oldest\n elif len(existing_files) >= 2:\n backup_helper.make_backup()\n backup_helper.remove_older_backups()\n\n logger.info(\"Database backed up\")", "async 
def save(self, job, options=None):\n if options is None:\n options = {}\n\n if not options.get('secretseed'):\n bundle = False\n filename = '/data/freenas-v1.db'\n else:\n bundle = True\n filename = tempfile.mkstemp()[1]\n os.chmod(filename, 0o600)\n with tarfile.open(filename, 'w') as tar:\n tar.add('/data/freenas-v1.db', arcname='freenas-v1.db')\n tar.add('/data/pwenc_secret', arcname='pwenc_secret')\n\n def read_write():\n with open(filename, 'rb') as f:\n f2 = os.fdopen(job.write_fd, 'wb')\n while True:\n read = f.read(1024)\n if read == b'':\n break\n f2.write(read)\n f2.close()\n await self.middleware.run_in_thread(read_write)\n\n if bundle:\n os.remove(filename)", "def saveDatabase():\r\n debug.write(\"saveDatabase processing\", 1)\r\n \"\"\" Only process if turbo mode is off \"\"\"\r\n if not currentTurboMode:\r\n debug.write(\"turbo mode off, process the save\", 1)\r\n \"\"\" Update all the player's stats gained and commit the database\"\"\"\r\n for player in players:\r\n debug.write(\"Commiting indivudal players to the virtual database: %s\" % player.name, 2)\r\n player.commit()\r\n debug.write(\"Attempting to save the database itself\", 1)\r\n database.save()\r\n debug.write(\"SQLite database saved\", 1)\r\n debug.write(\"Creating the event\", 1)\r\n \"\"\" Create and fire the event \"\"\"\r\n values = {\"type\":(\"setstring\", str(saveType))}\r\n gamethread.delayed(0, fireEvent, (\"sourcerpg_databasesaved\", values))\r\n debug.write(\"Event fired\", 1)\r\n \r\n \"\"\" Create a loop if we need to \"\"\"\r\n if str( saveType ) == \"intervals\":\r\n gamethread.delayedname(float(saveLength), 'sourcerpg_databasesave', saveDatabase)\r\n debug.write(\"saveDatabase processed\", 1)", "def bulk_copy_to_db(self):\n database = PostgreSQLCommon()\n\n try:\n file = open(self.file_name_hash)\n database.bulk_copy(file, self.storage_table)\n\n m.info('Bulk insert from %s has been successfully completed!'\n % self.file_name_hash)\n except Exception as err:\n m.error('OOps! Bulk insert operation FAILED! 
Reason: %s' % str(err))\n finally:\n database.close()\n\n if os.path.exists(self.file_name_hash):\n os.remove(self.file_name_hash)", "def save_whole_db_as_db(self, filename, seperate_structures=False):\n\n if not seperate_structures:\n outPath = self.outDIR+'/'+filename\n new_db = sqlite3.connect(outPath)\n cur = new_db.cursor()\n cur.execute(\"ATTACH DATABASE ':memory:' AS pdb_db\")\n new_db.close()", "def save_settings(path, server, station):\n db.save_data(path, server, station)", "def writeToDB(self, eventDateTime, eventFileName, eventType, eventPath):\n conn = self.createConnection()\n c = conn.cursor()\n\n c.execute(\"INSERT INTO RansomedFiles (TIME, EventFileName, EventType, EventPath) VALUES (?,?,?,?)\", (eventDateTime, eventFileName, eventType, eventPath))\n conn.commit()\n conn.close()\n\n # print(\"[+]Wrote to the database successfully!\")", "def save_to_tmp(form):\n file = request.files.get('file')\n suffix = os.path.splitext(secure_filename(file.filename))[-1]\n tf = tempfile.NamedTemporaryFile(dir='/tmp', delete=False, suffix=suffix, prefix='lpm_tmp_')\n filepath = tf.name\n tf.close()\n file.save(filepath)\n form.tmpname.data = os.path.basename(filepath)\n return filepath", "def __get_db(self, folder):\n db_dir = os.path.join(self.home, self.ibooks_doc_root, folder)\n db_fullname = None\n\n if not os.path.exists(self.tmp_dir):\n os.makedirs(self.tmp_dir)\n\n for dfile in os.listdir(db_dir):\n src = os.path.join(db_dir, dfile)\n dst = os.path.join(self.tmp_dir, dfile)\n shutil.copy(src, dst)\n if dfile.endswith(\".sqlite\"):\n db_fullname = dst\n \n return db_fullname", "def set_db_file():\n\n return os.path.join(db_path, db_file)", "def save_and_exit():\n con.commit()\n con.close()\n quit()", "def save(self):\n \n f = file(self.conf_file, \"w\")\n f.write(header + \"\\n\".join(map(str, self.db)) + \"\\n\")\n f.close()", "def run(self):\n\t\tlogger.info(\"Uploading data... 
@ %f, PID: %d\" % (time.time(), os.getpid()))\n\n\t\tself.dump_db()", "def dump_testdb(c, dbname=\"test_template\", fpath=\"tests/test_db.sql\"):\n default_env = {\n \"PATH\": os.environ[\"PATH\"],\n \"LANG\": \"en_US.UTF-8\",\n }\n\n env = os.environ\n env.update(default_env)\n\n c.run(f\"pg_dump -h localhost -p 5432 -U postgres {dbname} > {fpath}\", env=env)", "def database(request):\n with tempfile.TemporaryDirectory() as tmpdir:\n with zipfile.ZipFile(os.path.join(tmpdir, 'data.zip'), 'x') as datazip:\n datazip.write(settings.DATABASES['default']['NAME'], arcname='db.sqlite3')\n response = FileResponse(open(os.path.join(tmpdir, 'data.zip'), 'rb'))\n response['Content-Disposition'] = ('attachment; filename=\"db.sqlite3.zip\"')\n return response", "def save_user_db(file_path: str, user_db: dict) -> None:\n # In case directory is not yet existed.\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n\n with open(file_path, 'w') as f:\n print(f\"[I/O] Saving user data...{file_path}\")\n json.dump(user_db, f)", "def command(database, filename):\n\n click.secho(\n \"Backing up the database '{database}' on host '{host}' to file '{filename}'...\".format(\n database=settings.DATABASES[database]['NAME'],\n host=settings.DATABASES[database]['HOST'],\n filename=filename,\n )\n )\n # Make sure the backup path exists\n backup_path = get_backup_path()\n if not os.path.exists(backup_path):\n os.makedirs(backup_path)\n\n os.environ[\"PGPASSWORD\"] = settings.DATABASES[database]['PASSWORD']\n os.system(\n 'pg_dump -Fc -c -x -h {host} -U {username} --file={filename} {database}'.format(\n host=settings.DATABASES[database]['HOST'],\n username=settings.DATABASES[database]['USER'],\n database=settings.DATABASES[database]['NAME'],\n filename=filename,\n )\n )\n os.environ[\"PGPASSWORD\"] = ''", "def reload_db(self):\n if not settings.DATABASE_ENGINE in ['sqlite3', 'postgresql_psycopg2']:\n return None\n # Close connection to cleanly swap databases.\n connection.close()\n if settings.DATABASE_ENGINE == 'sqlite3':\n shutil.copyfile(self.db_backup_path, self.db_path)\n if settings.DATABASE_ENGINE == 'postgresql_psycopg2':\n # Establish a temporal connection to template1 database and\n # recreate TEST_DB_NAME.\n connection.settings_dict[\"DATABASE_NAME\"] = 'template1'\n cursor = connection.cursor()\n connection.creation.set_autocommit()\n cursor.execute(\"DROP DATABASE IF EXISTS %s\" % self.db_name)\n cursor.execute(\"CREATE DATABASE %s WITH TEMPLATE %s_backup\" % (\n self.db_name, self.db_name))\n connection.close()\n # Change the connection to the new test database.\n settings.DATABASE_NAME = self.db_name\n connection.settings_dict[\"DATABASE_NAME\"] = self.db_name\n # Get a cursor (even though we don't need one yet). This has\n # the side effect of initializing the test database.\n connection.cursor()\n return True", "def create_db():\r\n\r\n try:\r\n os.remove(proc_loc + 'SF_Parking.db')\r\n print(\"Legacy DB deleted\")\r\n except:\r\n pass\r\n disk_engine = create_engine('sqlite:///'+ proc_loc +'SF_Parking.db')\r\n return sqlite3.connect(proc_loc + 'SF_Parking.db')", "def save(self):\n logging.debug(\"sychronizing db\")\n self._db.sync()", "def settings_db_create(db_name=SETTINGS_DB_NAME, force=False):\n if not force and os.path.isfile(db_name):\n user_choice = input('Do you really want to drop database ? 
Type \"yes\" to continue\\n ')\n if not user_choice.isalpha() or not user_choice.lower() == 'yes':\n return False\n\n # DB creation logic goes here\n with lite.connect(db_name) as con:\n cur = con.cursor()\n cur.execute('DROP TABLE IF EXISTS PATHS')\n cur.execute(\"CREATE TABLE PATHS \"\n \"( LOGISIM_HOME VARCHAR NOT NULL,\\\n GRADING_PATH VARCHAR NOT NULL,\\\n IMPORT_PATH VARCHAR,\\\n GRADES_DB VARCHAR); \")\n cur.execute(\"CREATE TABLE LOCAL (\\\n GRADER_NAME VARCHAR,\\\n YEAR INT,\\\n SEMESTER CHAR (1),\\\n USE_STYLE BOOLEAN,\\\n SYNC_COMMAND VARCHAR);\")\n con.commit()\n return True", "def save(self, db):\n pass", "def save_user(username, data):\n\n hashed_username = base64.b64encode(Cryptography.hash(username).digest()).decode()\n\n file = open(getcwd() + Database.__DB_FILENAME, 'a')\n iv, ciphered_data = Cryptography.cipher(Cryptography.get_passphrase(), data)\n file.write(hashed_username + ':' + ciphered_data.hex() + '.' + iv.hex() + '\\n')\n file.flush()\n file.close()", "def _create_db(self):\n self.db = easydms.dbcore.Database(\":memory:\")\n self.db.create_db()", "def sync_db():\n pass", "def save_file_as(self, widget):\n\n\t\tdialog = Gtk.FileChooserDialog(\"Please choose a filename\", None,\n\t\t\tGtk.FileChooserAction.SAVE,\n\t\t\t(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,\n\t\t\t Gtk.STOCK_SAVE, Gtk.ResponseType.OK))\n\n\n\t\tdialog.set_filename(\"project\")\n\t\tfile_filters.add_filter_database(dialog)\n\n\t\tresponse = dialog.run()\n\t\tif response == Gtk.ResponseType.OK:\n\t\t\tfile_selected = dialog.get_filename()\n\t\t\ttry:\n\t\t\t\tshutil.copy(self.engine.database.db_loc, file_selected)\n\t\t\texcept: pass\n\t\t\t\n\t\telif response == Gtk.ResponseType.CANCEL:\n\t\t\tdialog.destroy()\n\n\t\tdialog.destroy()", "def save(self):\n self.__db.commit()", "def create_database(self):\n # FIXME(jlvillal) to work with Ironic\n raise Exception(\"Does not work\")\n\n if self.needs_database:\n conf_dir = os.path.join(self.test_dir, 'etc')\n safe_mkdirs(conf_dir)\n conf_filepath = os.path.join(conf_dir, 'ironic-manage.conf')\n\n with open(conf_filepath, 'w') as conf_file:\n conf_file.write('[DEFAULT]\\n')\n conf_file.write('sql_connection = %s' % self.sql_connection)\n conf_file.flush()\n\n ironic_db_env = 'IRONIC_DB_TEST_SQLITE_FILE'\n if ironic_db_env in os.environ:\n # use the empty db created and cached as a tempfile\n # instead of spending the time creating a new one\n db_location = os.environ[ironic_db_env]\n os.system('cp %s %s/tests.sqlite' %\n (db_location, self.test_dir))\n else:\n # FIXME(jlvillal) what is the correct command????\n cmd = ('%s -m ironic.cmd.manage --config-file %s db sync' %\n (sys.executable, conf_filepath))\n utils.execute(cmd)\n\n # copy the clean db to a temp location so that it\n # can be reused for future tests\n (osf, db_location) = tempfile.mkstemp()\n os.close(osf)\n os.system('cp %s/tests.sqlite %s' %\n (self.test_dir, db_location))\n os.environ[ironic_db_env] = db_location\n\n # cleanup the temp file when the test suite is\n # complete\n def _delete_cached_db():\n try:\n os.remove(os.environ[ironic_db_env])\n except Exception:\n # FIXME(jlvillal) We should log this\n raise NotImplementedError\n # logger.exception(\n # \"Error cleaning up the file %s\" %\n # os.environ[ironic_db_env])\n\n atexit.register(_delete_cached_db)", "def save_tmp_file(self, data):\n with open(self.tmp_file, 'wb') as f:\n f.write(data)", "def save(self):\n self.db.commit()", "def save_data(df, database_filename):\n engine = create_engine('sqlite:///' 
+database_filename)\n df.to_sql('Project2', engine, index=False)", "def create_test_db(filename: str):\n database = sqlite3.connect(filename)\n cursor = database.cursor()\n # Create the tables\n cursor.executescript(open(DATABASE_TABLE_CREATE, \"r\").read())\n # Insert data\n cursor.executescript(open(DATABASE_DATA_CREATE, \"r\").read())\n # Save (commit) the changes\n database.commit()", "def __store(self):\n # connection strings are accessed directly by dbo\n dbo = dbo.connect()\n dbo.save(self.__to_dict())\n # not supre important to call but a nice idea\n dbo.destroy()", "def Save(self) -> None:\n self.__conn.commit()", "def generate_sqlite_db_path():\n tmp_dir = str(tempfile.mkdtemp())\n abspath = os.path.abspath( # noqa: PTH100\n os.path.join( # noqa: PTH118\n tmp_dir,\n \"sqlite_db\"\n + \"\".join(\n [random.choice(string.ascii_letters + string.digits) for _ in range(8)]\n )\n + \".db\",\n )\n )\n return abspath", "def store(self, filename):", "def export(self):\n f = open(self.database, 'w')\n for line in self.conn.iterdump():\n f.write(line)\n self.c.close()", "def connect_dataBase(db_dir, create_cmd):\n just_created = False #flag of is the db is already exist\n \n if not os.path.isfile(db_dir):\n #create the db file in the directory\n with open(db_dir , 'w') as f:\n just_created = True\n #print 'database handler created -- ' , db_dir \n\n try:\n conn = lite.connect(db_dir)\n except lite.Error, e:\n #print \"Error %s:\" % db_dir\n sys.exit(1)\n finally:\n if just_created:\n #create the table \n create_dataBase(conn, create_cmd)\n return True", "def create_db(self):", "def db():\n the_db.session.close()\n the_db.drop_all()\n the_db.create_all()\n return the_db", "def save_database(user: User, slug: str, path: str, history: str = '') -> None:\n LOG.debug(\"Path: %s, history: %s, slug: %s\", path, history, slug)\n\n if path.startswith('/discuss'):\n path = path[len('/discuss'):]\n path = path[path.index('/') if '/' in path else 0:]\n\n db_issues = DBDiscussionSession.query(Issue).all()\n slugs = [issue.slug for issue in db_issues]\n if not any([slug in path for slug in slugs]) or slug not in path:\n path = '/{}/{}'.format(slug, path)\n\n if len(history) > 0:\n history = '?{}={}'.format(ArgumentationStep.HISTORY.value, history)\n\n LOG.debug(\"Saving %s%s\", path, history)\n DBDiscussionSession.add(History(author=user, path=path + history))\n DBDiscussionSession.flush()", "def make_db():\n\n db.create_all()", "def db_small_path():\n return os.path.join(_here, 'fixtures/databases/db-small/database')", "def prepare_db():\n conn = sqlite.connect(\"temp.db\")\n sql = conn.cursor()\n sql.execute(\"SELECT sql FROM sqlite_master WHERE name='points'\")\n rows = sql.fetchall()\n if len(rows) == 0:\n print \"Database does not exist. 
Creating Database...\"\n sql.execute('''CREATE TABLE points\n (date datetime, humidity real, temp_c real, temp_f real, index_c real, index_f)''')\n print \"Database created\"\n conn.close()", "def db_file():\n return abspath('vmchecker.db')", "def save():", "def write_img_to_db():\n with lite.connect(\"test.db\") as con:\n cur = con.cursor()\n data = read_image_from_fs()\n binary = lite.Binary(data)\n cur.execute(\"INSERT INTO Images(Data) VALUES (?)\", (binary,))", "def destroy(self):\n self.close()\n if self.wantAnyDbm:\n lt = time.asctime(time.localtime())\n trans = maketrans(': ','__')\n t = lt.translate(trans)\n head, tail = os.path.split(self.filepath)\n newFileName = 'UDStoreBak'+t\n if os.path.exists(self.filepath):\n try:\n os.rename(tail, newFileName)\n uber.air.writeServerEvent('Uberdog data store Info', 0 \\\n , 'Creating backup of file: %s saving as: %s' %(tail, newFileName))\n except:\n uber.air.writeServerEvent('Uberdog data store Info', 0 \\\n , 'Unable to create backup of file: %s ' %tail)\n else:\n # Remove the filename with all sufix's\n # .bak, .dir, .dat\n files = os.listdir(head)\n for file in files:\n if file.find(tail)>-1:\n filename, ext = os.path.splitext(file)\n try:\n os.rename(file, newFileName+ext)\n uber.air.writeServerEvent('Uberdog data store Info', 0 \\\n , 'Creating backup of file: %s saving as: %s' %(file,newFileName+ext))\n except:\n uber.air.writeServerEvent('Uberdog data store Info', 0 \\\n , 'Unable to create backup of file: %s ' %newFileName+ext)\n else:\n if os.path.exists(self.filepath + '.bu'):\n os.remove(self.filepath + '.bu')\n if os.path.exists(self.filepath):\n os.remove(self.filepath)", "def GetDatabase(self):\r\n\r\n if self.database:\r\n return self.database\r\n \r\n if not os.path.exists(self.GetDataDir()):\r\n # Create the data folder, it still doesn't exist\r\n os.makedirs(self.GetDataDir())\r\n\r\n self.database = os.path.join(self.GetDataDir(), \"NDT_Database.db\")\r\n return self.database", "def _restore_orig_directory(self):\n if not self._is_temp_dir:\n return\n self._base_data_dir = self._orig_base_data_dir\n del self._orig_base_data_dir\n self._base_logs_dir = self._orig_base_logs_dir\n del self._orig_base_logs_dir\n self.db.change_path(self._base_data_dir / \"projects.db\")\n self.set_current(\"default\", update=False)\n self._is_temp_dir = False", "def new_database(app):\n app.status.message(\"Opening a folder..\")\n path = app.dialog.directory(\"Select a folder for the new database..\")\n if path == '':\n app.status.message('') \n return\n app.status.cursorToHourglass()\n app.close()\n folder = db.database(path=path, \n status = app.status, \n dialog = app.dialog)\n app.display(folder)\n app.status.hide()\n app.status.cursorToNormal()", "def getDBPath():\n return os.path.join(CONFIG_DIR, CONFIG_DICT['common']['local_db'])", "def get_db_path():\n \n return(db_run.db_abs_path)", "def tmp_backup(path):\n if not _os.path.isfile(path):\n raise FileNotExistError(path)\n tmpdir = _tempfile.mkdtemp()\n _shutil.copy2(path, tmpdir)\n return _os.path.join(tmpdir, _os.path.basename(path))", "def disconnect(self):\n self._save_database()\n self.dbh.close()\n self._remove_temporary_file()", "def init_db(self):\n if self.is_client() or not self.is_responsible_validator():\n return\n\n ip, _ = self.experiment.get_peer_ip_port_by_id(self.my_id)\n\n self.db_path = os.path.join(\"/tmp\", \"postgres-data\", ip)\n shutil.rmtree(self.db_path, ignore_errors=True)\n os.makedirs(self.db_path, exist_ok=True)\n\n 
os.system(\"/usr/lib/postgresql/11/bin/initdb %s > postgres.out\" % self.db_path)", "def setUp(self):\n self.dbfile = tempfile.NamedTemporaryFile(delete=False)\n self.path = self.dbfile.name\n self.db = Tkvdb(self.path)", "def save(self):\n files = {\n \"project_id\" : self.project_id,\n \"name\": self.name,\n \"description\": self.description,\n \"link\": self.link,\n \"created_by\": self.created_by\n }\n query = \"\"\"INSERT INTO files (project_id, name, description, link, created_by) \\\n VALUES (%(project_id)s, %(name)s, %(description)s, %(link)s, %(created_by)s);\n \"\"\"\n self.cur.execute(query, files)\n self.db.commit()\n self.cur.close()\n return \"Success\"", "def create_db(db_file):\n try:\n conn = sqlite3.connect(db_file)\n cursor = conn.cursor()\n cursor.execute(CREATE_VISITORS_SQL)\n cursor.execute(CREATE_SETTINGS_SQL)\n cursor.execute(ADD_PASS_SQL)\n conn.commit()\n conn.close()\n app.logger.info(\"Database commit successful\")\n except Error as e:\n print(e)\n raise", "def create(db):\n if exists(db):\n print(\"phonebook %r already exists\" % db)\n sys.exit(-1)\n else:\n database = {}\n pickle.dump(database, open(db, 'wb'))\n print(\"created phonebook %r in the current directory\" % db)" ]
[ "0.6738473", "0.64771056", "0.6374693", "0.6369591", "0.62350726", "0.6213593", "0.6164571", "0.60522014", "0.6035333", "0.60280454", "0.60020876", "0.6000187", "0.59944266", "0.5981501", "0.5978801", "0.5921293", "0.58182704", "0.5779628", "0.5761551", "0.5757", "0.57395315", "0.5710081", "0.5706372", "0.5705235", "0.5685833", "0.5680208", "0.5679067", "0.56571555", "0.5644417", "0.56423223", "0.5639725", "0.5635036", "0.5611749", "0.5609987", "0.5603998", "0.55995953", "0.5598774", "0.55940753", "0.55809", "0.555201", "0.5531179", "0.5527691", "0.5511015", "0.5504783", "0.55002534", "0.5500105", "0.54982996", "0.54901373", "0.5473761", "0.54520446", "0.5447999", "0.5438375", "0.5421258", "0.54158455", "0.53838336", "0.5350633", "0.53464687", "0.5342961", "0.53408325", "0.53185534", "0.531136", "0.53101516", "0.529678", "0.5296701", "0.52822655", "0.5277076", "0.52699286", "0.5261172", "0.5261046", "0.5257748", "0.52553874", "0.5244682", "0.5243954", "0.52426916", "0.5236245", "0.5233531", "0.52319133", "0.52306044", "0.52245796", "0.52210414", "0.5198996", "0.5190395", "0.51864576", "0.51802915", "0.51578784", "0.5155272", "0.5153618", "0.5150364", "0.51487076", "0.51479286", "0.5141792", "0.5135638", "0.51331204", "0.511803", "0.5115699", "0.51150894", "0.51106787", "0.51091576", "0.5108735", "0.5104545" ]
0.63280445
4
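A note on the `mysqldump` row above: its `document` field is a bare one-line Fabric task that leans on names defined elsewhere in its fabfile. Below is a minimal self-contained sketch — not the dataset's code verbatim — assuming Fabric 1.x for `run`, and treating `database_user`, `database_name`, and the `~/tmp` dump path as the placeholders they already are in the row itself.

```python
# Sketch only -- assumes Fabric 1.x; user, db name, and path are placeholders.
from fabric.api import run

def mysqldump():
    # -u names the MySQL user; a bare -p makes mysqldump prompt for the
    # password on the remote tty instead of hardwiring it in the command.
    # Stdout is redirected into a fixed dump file under ~/tmp.
    run("mysqldump -u database_user -p database_name > ~/tmp/exported_db.sql")
```

As the row's `query` notes, the credentials and filename are hardwired, so the task is meant to be edited in place rather than parameterized.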
To save some output text and time, if you know only one app has changed its database structure, you can run this with the app's name.
def deploy(app_to_migrate=""): mysqldump() # backup database before making changes with cd(code_dir): run("git pull") run(python_add_str + "python manage.py migrate %s" % app_to_migrate) run(python_add_str + "python manage.py createinitialrevisions") # only if using reversion run(python_add_str + "python manage.py collectstatic --noinput") run("../apache2/bin/restart")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def django_sql(appname):\r\n app = wingapi.gApplication\r\n cmdline, dirname, err = _get_base_cmdline()\r\n if err is not None:\r\n title = _(\"Failed to Generate SQL\")\r\n msg = _(\"Could not generate SQL: %s\") % err\r\n app.ShowMessageDialog(title, msg)\r\n return\r\n cmdline += ['sql', appname]\r\n err, output = app.ExecuteCommandLine(cmdline, dirname, None, 5.0, return_stderr=True)\r\n if err != 0:\r\n if err == 1:\r\n reason = _(\"Failed to start sub-process\")\r\n else:\r\n reason = _(\"Sub-process timed out\")\r\n title = _(\"Failed to Generate SQL\")\r\n msg = _(\"Could not generate SQL: %s\") % reason\r\n out = _get_output(output)\r\n if out:\r\n msg += '\\n\\n' + out\r\n app.ShowMessageDialog(title, msg)\r\n else:\r\n editor = app.ScratchEditor(_(\"Django SQL\"), 'text/x-sql')\r\n doc = editor.GetDocument()\r\n doc.SetText(_get_output(output, pfx=('', ''), verbose=1))", "def appstr(app):\n ...", "def get_name():\n return config.APP_NAME", "def fallback_application_name() -> str:\n # Import here instead of at the top to avoid an ImportError caused by an\n # import cycle. This can be removed once the import graph of id3c.cli is\n # less tangled.\n from ..cli.utils import running_command_name\n\n # \"The application_name can be any string of less than NAMEDATALEN\n # characters (64 characters in a standard build).\"¹\n #\n # psycopg2 / libpq will truncate for us, but they will issue a NOTICE log\n # message if they do. Avoid the cluttery notice by truncating ourselves.\n #\n # ¹ https://www.postgresql.org/docs/current/runtime-config-logging.html#GUC-APPLICATION-NAME\n max_len = 64\n appname = running_command_name()\n\n return shorten(appname, max_len, \"...\")", "def print_app_version(app_name):\n print_file('{}/current/version.txt'.format(get_app_basedir(app_name)))", "def appdata(appname):\n z = Zap(appname)\n z.appdata(stdout=True)", "def app_title():\n print(\"*\" * 27)\n print(\" Stock App\")\n print(\"*\" * 27)", "def app_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"app_name\")", "def _get_app_name(app):\n return app[APP_NAME_KEY]", "def _get_app_name(self):\n # TODO move app name into pyglet.app (also useful for OS X menu bar?).\n return sys.argv[0]", "def save_database(app):\n app.database().save()\n app.status.message('Finished saving..')", "def db(action):\n if action not in commands:\n return 'Available commands: %s' % list(commands.keys())\n from app.db import DB\n db = DB(app)\n db.execute_sql(commands[action])\n return 'Database %sed' % action", "def get_app_name(i):\n return app_id + '-' + str(i)", "def getApp(self, args):\n \n try:\n return args[2]\n except:\n raise ArgsException, '3rd argument should be the application(path) for creating the sql backup'", "def db_for_write(self, model, **hints):\r\n if model._meta.app_label == self.APP_LABEL:\r\n return self.DB_NAME\r\n return None", "def run_dbname(self, expanded, unexpanded) :\n\t\tif expanded :\n\t\t\treturn self.errormessage(\"Doesn't need any argument\")\n\t\tself.htmlmessage(self.__context.Control_Panel.db_name(), printable=1)", "def prog_info( self ):\r\n fll = AppGlobal.force_log_level\r\n logger = self.logger\r\n\r\n a_str = \"\"\r\n if ( self.no_restarts == 0 ) :\r\n\r\n a_str = f\"{a_str}\\n\"\r\n a_str = f\"{a_str}\\n\"\r\n a_str = f\"{a_str}\\n============================\"\r\n a_str = f\"{a_str}\\n\"\r\n\r\n a_str = f\"{a_str}\\nRunning {self.app_name} version = {self.version} mode = {self.parameters.mode}\"\r\n a_str = f\"{a_str}\\n\"\r\n #logger.log( fll, a_str 
)\r\n\r\n # ================================\r\n # logger.log( fll, \"\" ) # not really critical but want to show up would a number be better ?\r\n # logger.log( fll, \"\" )\r\n # logger.log( fll, \"============================\" )\r\n # logger.log( fll, \"\" )\r\n\r\n # logger.log( fll, \"Running \" + self.app_name + \" version = \" + self.version + \" mode = \" + self.parameters.mode )\r\n # logger.log( fll, \"\" )\r\n\r\n else:\r\n #a_str = \"\"\r\n a_str = f\"{a_str}\\n======\"\r\n a_str = f\"{a_str}\\nRestarting {self.app_name} version = {self.version} mode = {self.parameters.mode}\"\r\n a_str = f\"{a_str}\\n======\"\r\n\r\n if len( sys.argv ) == 0:\r\n a_str = f\"{a_str}\\nno command line arg \"\r\n else:\r\n for ix_arg, i_arg in enumerate( sys.argv ):\r\n a_str = f\"{a_str}\\ncommand line arg { ix_arg } = {sys.argv[ix_arg]}\"\r\n\r\n a_str = f\"{a_str}\\ncurrent directory {os.getcwd()}\"\r\n # a_str = f\"{a_str}\\nCOMPUTERNAME {os.getenv( 'COMPUTERNAME' )}\" # may not exist in now in running on\r\n logger.log( fll, a_str )\r\n\r\n logger.log( fll, f\"{self.parameters}\" )\r\n\r\n a_str = self.parameters.running_on.get_str()\r\n logger.log( fll, a_str )\r\n # next may not be best way or place\r\n # self.parameters.running_on.log_me( logger, logger_level = AppGlobal.force_log_level, print_flag = True )\r\n\r\n start_ts = time.time()\r\n dt_obj = datetime.datetime.utcfromtimestamp( start_ts )\r\n string_rep = dt_obj.strftime('%Y-%m-%d %H:%M:%S')\r\n logger.log( fll, f\"Time now: {string_rep}\" ) # but logging includes this in some format\r\n\r\n return", "def os_start_db( self, ):\r\n pass", "def app_name(self):\n return self._app_name", "def app_name(self): # pylint:disable=function-redefined\n return self._app_name", "def app_name(self) -> str:\n return self._app_name", "def set_program_name(program_name):\n global _PROGRAM_NAME\n _PROGRAM_NAME = program_name", "def _app(self) -> str:\n return self.charm.app.name", "def db_for_write(self, model, **hints):\n if model._meta.app_label == self.app_label:\n return self.db_name\n return None", "def find_program(name):\r\n return name", "def app_name(self, value):\n self._app_name = value", "def __str__(self):\n return self.program_name", "def main():\n data = get_db()\n myCursor = data.cursor()\n myCursor.execute(\"SELECT * FROM users\")\n description = [desc[0] for desc in myCursor.description]\n\n logger = get_logger()\n\n for user in myCursor:\n userInfo = \"\".join(\n f'{des}={str(usr)}; ' for usr, des in zip(user, description)\n )\n logger.info(userInfo)\n\n myCursor.close()\n data.close()", "def command_dbtool(self):\n dbtool.main(*self.args())", "def db_for_write(self, model, **hints):\n state_db = self._db_name(model)\n if state_db in settings.DATABASES:\n name = state_db\n else:\n name = 'default'\n logger.debug('db_for_write({}): {}'.format(state_db, name))\n return name", "def application_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"application_name\")", "def application_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"application_name\")", "def main():\n create_all_views()\n outlog_q1 = question_1()\n outlog_q2 = question_2()\n outlog_q3 = question_3()\n\n db.close()\n\n generateLog(outlog_q1, outlog_q2, outlog_q3)", "def get_db_name(self):\n\t\treturn conf.db_name", "def appdata(self) -> str:\n return os.path.join(self.appdata_dir, f'{self.appdata_token}.sql')", "def verbose_app_label(request):\n # import ipdb; ipdb.set_trace()\n \n # iterate through the app_list looking for a corresponding app 
with\n # a VERBOSE_APP_LABEL\n \n return {}", "def db_for_read(self, model, **hints):\r\n if model._meta.app_label == self.APP_LABEL:\r\n return self.DB_NAME\r\n return None", "def main():\n db = _db.Database(experiment.ORACLE_PATH)\n db.populate_kernel_names_table()\n db.commit()", "def getDatabaseName(self):\n return f\"n{self.name.capitalize()}\"", "def _app_id(self):\n return '{}-{}'.format(self.config['app']['name'],\n self.config['app']['version'])", "def get_app_info(self, name):\n with hide(\"output\", \"running\"):\n result = local(\"redis-cli -h {host} -p 6379 -n {db} hgetall {name}\".format(\n host=self.host, name=name, db=REDIS_APPLICATION_DB_NUM), capture=True)\n\n if len(result.stdout) > 0:\n splits = result.stdout.split(\"\\n\")\n fmt_result = dict([(splits[i], splits[i+1])\n for i in range(0, len(splits), 2)])\n pp = pprint.PrettyPrinter(indent=2)\n pp.pprint(fmt_result)\n return fmt_result\n else:\n warn(\"Application \\\"%s\\\" not found\" % name)\n return None", "def update_urls_file(self, app_name):\n\n logger.info(\"\\n--------------------------------------------------------\\n\\t\\tRefreshing application list in urls.py\")\n copyfile(settings.SITE_ROOT + \"/\" + settings.APPLICATION_NAME + \"/urls.py\", settings.SITE_ROOT + \"/\" + settings.APPLICATION_NAME + \"/urls.py.backup\")\n t = loader.get_template('applicationManager/applicationFileTemplates/project_urls_py.txt')\n\n apps = Application.objects.all()\n\n c = {'applist': apps}\n rendered = t.render(c)\n open(settings.SITE_ROOT + \"/\" + settings.APPLICATION_NAME + \"/urls.py\", \"w+\").write(rendered)", "def getApplicationName(self) -> unicode:\n ...", "def name():\n code = \"get name of current screen saver\"\n return applescript.tell.app(\"System Events\", code).out", "def init_data_file_name():\n now = datetime.datetime.now().isoformat().split('.')[0].replace(':', '-')\n filename = 'show-commands-' + now + \".txt\"\n return filename", "def new_app(project_name,app_name ):\n from flask_create_app.core.commands.cmd_newapp import create_new_app\n proj_dir = os.getcwd()\n create_new_app(app_name, proj_dir,project_name)", "def _unknown_app(self):\n self.make_unknown()", "def get_name(self, name):\n return self.apps[name]['name']", "def get_name(app):\n from uuid import uuid4 as uuid\n return (f'accelpy_{app[\"application\"][\"product_id\"]}'\n f'_{str(uuid()).replace(\"-\", \"\")[:8]}')", "def runmain():\n\n if roboapps.Unchecked():\n roboapps.Exit()\n else:\n SaveView(\"Snapshot-View-0\")", "def main():\n print(\"Call your main application code here\")", "def main():\n print(\"Call your main application code here\")", "def main():\n print(\"Call your main application code here\")", "def app_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"app_name\")", "def process_app_info(self):\n pass", "def get_app_name(self):\n return getattr(self, '_app_name', None)", "def save_program(self, content):\n self.create_file_directory_if_not_exists(self, self.path)\n\n files = len(os.listdir(self.path))\n name = \"program\".__add__(str(files+1).__add__(\".py\"))\n file = open(self.path.__add__(name), 'w')\n file.write(content.decode())\n file.close()\n return name", "def exe(self, name):\n\n return name", "def db_for_read(self, model, **hints):\n if model._meta.app_label == self.app_label:\n return self.db_name\n return None", "def set_name(self, application_name):\r\n self._name = application_name", "def main():\n discovered_path = AskFile(0, '*.db', 'Select the discovered database')\n if discovered_path 
is None:\n return\n\n with open(discovered_path, 'rb') as f:\n functions = pickle.load(f)\n\n rename_functions(functions)", "def db_for_write(self, model, **hints):\n if model._meta.app_label == 'eotrts_student':\n return 'eotrts_db'\n elif model._meta.app_label == 'essential_english_words_1':\n return 'essential_english_db'\n return None", "def get_runinfo_basename():\n return \"dumpruninfo\"", "def db_for_read(self, model, **hints):\n state_db = self._db_name(model)\n if state_db in settings.DATABASES:\n name = state_db\n else:\n name = 'default'\n logger.debug('db_for_read({}): {}'.format(state_db, name))\n return name", "def app():\n articles_list = get_popular_articles()\n authors_list = get_popular_authors()\n rates_list = get_days_rate()\n file = open('results.txt', 'w+')\n file.write('Most Popular Articles\\n')\n file.write('%s\\n' % articles_list)\n file.write('\\n')\n file.write('Most Popular Authors\\n')\n file.write('%s\\n' % authors_list)\n file.write('\\n')\n file.write('Days with error rate > 1%\\n')\n file.write('%s\\n' % rates_list)", "def test_app_is_created(app):\n assert app.name == \"myapp.app\"", "def main():\n print get_latest_data()", "def _generateApplicationName(self, obj, **args):\n result = []\n try:\n result.append(obj.getApplication().name)\n except:\n pass\n return result", "def getName():\n\n tcflush(sys.stdin, TCIFLUSH)\n name = input(\" You say:\\n \")\n updateNameDatabase(name)\n return name", "def copy_help_texts_to_database(\n app_config,\n verbosity=2,\n interactive=True,\n using=DEFAULT_DB_ALIAS,\n apps=global_apps,\n **kwargs\n):\n if not _check_app_config(app_config, using):\n return\n\n app_models = [\n app_model\n for app_model in app_config.get_models()\n if not any(\n [\n app_model._meta.abstract,\n app_model._meta.proxy,\n not app_model._meta.managed,\n ]\n )\n ]\n\n columns_comments = {\n model._meta.db_table: get_comments_for_model(model) for model in app_models\n }\n\n if columns_comments:\n add_column_comments_to_database(columns_comments, using)\n\n table_comments = {\n model._meta.db_table: model._meta.verbose_name.title()\n for model in app_models\n if model._meta.verbose_name\n }\n\n if table_comments:\n add_table_comments_to_database(table_comments, using)\n\n if verbosity >= 2:\n for table, columns in columns_comments.items():\n for column, comment in columns.items():\n print(\"Adding comment in %s for %s = '%s'\" % (table, column, comment))\n\n for table, comment in table_comments.items():\n print(\"Adding comment to %s = '%s'\" % (table, comment))", "def main():\n db = sqlite3.connect(DATABASE_NAME)\n create_tables(db)\n\n # Write your code below", "def app_name(self):\n module_filepath = inspect.getfile(type(self))\n parent_dir = os.path.dirname\n app_dirpath = parent_dir(parent_dir(parent_dir(module_filepath)))\n app_name = os.path.basename(app_dirpath)\n return app_name", "def main():\n\n # Has oceanview done something? 
If this is still false by the end,\n # Display the Usage information.\n did_something = False\n\n # The user wants to clear the database.\n if 'cleardb' in sys.argv:\n did_something = True\n print(\"It's sqlite, just delete the file.\")\n\n # The user wants the test data added to the database.\n if 'maketestdb' in sys.argv:\n did_something = True\n database = data.Database(\"db.sqlite\", \"database/build_db.sql\")\n dbutil.add_test_data(database)\n\n # The user wants the front end launched\n if 'front' in sys.argv or 'both' in sys.argv:\n did_something = True\n frontend = front.init()\n frontend.run(INTERFACE, 8000)\n\n # The user wants the back end launched.\n if 'back' in sys.argv or 'both' in sys.argv:\n did_something = True\n backend = back.init()\n backend.run(INTERFACE, 80)\n\n # did_something is False, nothing was done, show the usage info.\n if did_something is False:\n print(\"Usage: python oceanview.py [command]\")\n print(\"COMMANDS:\")\n print(\" front - start the frontend\")\n print(\" back - start the backend\")\n print(\" both - start both\")\n print(\" maketestdb - add test data to the database\")", "def _extract_appname(self, log):\n appname = \"\"\n if \"appLaunch\" in log:\n appname = log[\"appLaunch\"][\"appName\"]\n else:\n self.logger.info(\"no applaunch field\")\n self.logger.info(log[\"event\"])\n pass \n \n return appname", "def get_name(self) -> str:\n return self.dbname", "def read_database(app):\n app.status.cursorToHourglass()\n app.central.closeAllSubWindows()\n app.database().scan()\n app.status.cursorToNormal() \n app.refresh()", "def main():\r\n\r\n # delete the database file if it already exists\r\n db_path = Path('../../data/db.sqlite')\r\n db_path.unlink(missing_ok=True)\r\n\r\n # create the database\r\n with sqlite3.connect(db_path) as connection:\r\n create_database(connection)", "def main():\n\n parser = init_parser()\n args = parser.parse_args()\n\n # Set up logging.\n level = logging.INFO\n if args.debug:\n level = logging.DEBUG\n logging.basicConfig(format='%(asctime)s %(levelname)s %(filename)s:' \\\n '%(lineno)s %(message)s ', level=level)\n logging.info(\"Logging started\")\n\n message = \"Backing up \"\n if args.source_code:\n message += \"source and \"\n message += \"data for: {0}\".format(args.app_id)\n logging.info(message)\n\n zk_connection_locations = appscale_info.get_zk_locations_string()\n zookeeper = zk.ZKTransaction(host=zk_connection_locations)\n db_info = appscale_info.get_db_info()\n table = db_info[':table']\n\n skip_list = args.skip\n if not skip_list:\n skip_list = []\n logging.info(\"Will skip the following kinds: {0}\".format(sorted(skip_list)))\n ds_backup = DatastoreBackup(args.app_id, zookeeper, table,\n source_code=args.source_code, skip_list=sorted(skip_list))\n try:\n ds_backup.run()\n finally:\n zookeeper.close()", "def main(ctx, verbose):\n return", "def print_mini_help(app_name):\n print \"\\nExecute the script with either '-h' or '--help' to obtain detailed help on how to run the script:\"\n print 'python {0} -h'.format(app_name)\n print \"or\"\n print 'python {0} --help\\n'.format(app_name)", "def startapp():", "def add_db(self):\n name_db = self.name_db.get()\n if len(name_db) > 0:\n self.sql_database.db_name = name_db\n if self.sql_database.create_database():\n msg.showinfo(\n message=\"\".join(\n [str(self.name_db.get()), \" created as text_reader_\", str(self.sql_database.db_name)]))\n self.name_db.delete(0, tk.END)\n self.show_db_combobox()\n else:\n msg.showinfo(message=\"Failed\")\n else:\n 
msg.showinfo(message=\"Write db name!\")", "def main():\n app = App()\n app.run()", "def db_for_write(self, model, **hints):\n if model._meta.app_label == 'test':\n return 'test'\n return None", "def _writer(self, code):\r\n\t\tfile = open('my_db\\\\kiwi_db.py', 'w')\r\n\t\tfile.write('COMMAND_NAME = ' + code)", "def db_for_write(self, model, **hints):\n if model._meta.app_label == 'delivery':\n return 'db1'\n return None", "def run(output, path):\n\n # Derive path to dbfile\n dbfile = os.path.join(path, \"articles.sqlite\")\n\n # Stream text from database to file\n Export.stream(dbfile, output)", "def LogProcess(self):\n time = datetime.today().strftime('%a %Y%b%d %X')\n# Get user name.\n f = os.popen(\"whoami\",\"r\")\n user = f.read().strip()\n f.close()\n\n entry = '%s\\t%s\\t%s\\t%s\\n' % (time, self.topdir, user, self.version)\n\n if ismounted(c.exams_file):\n# Append info to the exams file.\n try:\n f = open(c.exams_file,'a+')\n f.seek(0, 2)\n f.write(entry)\n f.close()\n except:\n# Not a huge problem if this doesn't work.\n pass", "def getDatabaseName(self):\n raise NotImplementedError", "def write_manifest(self):\n import time\n import sys\n with open('bake-manifest-' + time.strftime('%Y-%m-%d-%H:%M:%S') + \n '.txt', 'w') as hout:\n hout.write(' '.join(sys.argv) + '\\n')\n for k, v in self.table.items():\n hout.write(';'.join([k] + v) + '\\n')", "def sync_apps(self):\n pass", "def name_option(args, run):\n run.experiment_info[\"name\"] = args\n run.run_logger = run.root_logger.getChild(args)", "def applicationsdetails():\n appdicts = db.hgetall('applications')\n finaldict = OrderedDict()\n for appname in sorted(appdicts):\n instances = json.loads(appdicts.get(appname))\n instance_map = OrderedDict()\n for key in sorted(instances):\n instance_map.__setitem__(key,instances.get(key))\n finaldict.__setitem__(appname,instance_map)\n return render_template('robots.html', appdicts=finaldict)", "def dbName(self, code) -> str:\n return f'{code}{self.name}'", "def app_delete(self, name):\n self.core.api.os.shell.cmd('{0} delete app /app.name:\"{1}\"'.format(self.APP_CMD, name))", "def ls():\n cfgmgr = ConfigManager()\n apps = cfgmgr['apps']\n for i in apps:\n print(fc(\"- {g}{appname}{rst}\", appname=i))", "def first_launch():\r\n if os.path.exists('diary.db'):\r\n return False\r\n else:\r\n return True", "def exe_filename(self):", "def current_app(self) -> str:\n app_id = self.app.get_current() # Returns the application ID (string) of the\n foreground_app = [x for x in self.app.list_apps() if app_id == x[\"id\"]][0]\n return foreground_app['title']", "def handle(self, *args, **options):\n app_labels = [app.split('.')[-1] for app in settings.INSTALLED_APPS]\n if not args:\n args = app_labels\n for app in args:\n if app not in app_labels:\n print \"%s is not a valid application\" % app\n continue\n\n app_module = get_app(app_label=app, emptyOK=True)\n if app_module is None:\n continue\n\n print \"Models of %s:\" % app\n for model in get_models(app_module):\n print \" - %s has %d entries\" % (\n model.__name__,\n model.objects.count()\n )", "def show_db_overview(self):\n\n models_list = sorted_models_list()\n apps = [p.app_label for p in settings.SITE.installed_plugins]\n s = \"%d apps: %s.\" % (len(apps), \", \".join(apps))\n s += \"\\n%d models:\\n\" % len(models_list)\n i = 0\n headers = [\n #~ \"No.\",\n \"Name\",\n \"Default table\",\n #~ \"M\",\n \"#fields\",\n \"#rows\",\n #~ ,\"first\",\"last\"\n ]\n rows = []\n for model in models_list:\n if True: # model._meta.managed:\n i += 1\n 
cells = []\n #~ cells.append(str(i))\n cells.append(fmn(model))\n cells.append(model.get_default_table())\n #~ cells.append(str(model))\n #~ if model._meta.managed:\n #~ cells.append('X')\n #~ else:\n #~ cells.append('')\n cells.append(str(len(model._meta.concrete_fields)))\n qs = model.objects.all()\n n = qs.count()\n cells.append(str(n))\n #~ if n:\n #~ cells.append(obj2str(qs[0]))\n #~ cells.append(obj2str(qs[n-1]))\n #~ else:\n #~ cells.append('')\n #~ cells.append('')\n\n rows.append(cells)\n s += rstgen.table(headers, rows)\n return s" ]
[ "0.60006577", "0.59486735", "0.59302884", "0.59237796", "0.5841282", "0.5828717", "0.5821404", "0.57724136", "0.5769709", "0.57531637", "0.5702471", "0.5648174", "0.56297046", "0.5620219", "0.5586959", "0.55405587", "0.5521775", "0.55170804", "0.5489317", "0.54577833", "0.5446619", "0.544152", "0.54356235", "0.5432898", "0.54144573", "0.540534", "0.5391768", "0.53887933", "0.53866005", "0.5383587", "0.537857", "0.537857", "0.53745127", "0.5370372", "0.536989", "0.5366189", "0.5336084", "0.5330693", "0.5329404", "0.5327674", "0.53233933", "0.5321307", "0.5303798", "0.52677965", "0.52455103", "0.5242761", "0.5219378", "0.5208268", "0.5205501", "0.52049184", "0.5204876", "0.5204876", "0.5204876", "0.5200281", "0.51849407", "0.517736", "0.5176739", "0.5173193", "0.5167591", "0.5160597", "0.5159962", "0.5145831", "0.51403975", "0.5131281", "0.5130956", "0.5127639", "0.5116209", "0.51078343", "0.51037216", "0.50938445", "0.50926083", "0.5090176", "0.5084289", "0.5083767", "0.5082893", "0.50813276", "0.50781876", "0.5076728", "0.5067917", "0.50559264", "0.5053633", "0.50472844", "0.5040914", "0.5032041", "0.50243545", "0.5024023", "0.5016536", "0.5009804", "0.50093204", "0.5008086", "0.50014555", "0.4997847", "0.4997225", "0.49951604", "0.4992779", "0.49901247", "0.49828732", "0.49792507", "0.49768975", "0.49763224", "0.49717474" ]
0.0
-1
Imports a database from the tmp directory. Use very carefully! (or just to remind yourself how to import mysql data) Modify this code directly if needed, as it hardwires the username, db name and filename.
def mysql_import(): # first make another copy of the db run("mysqldump -u database_user database_name -p > ~/tmp/exported_db_temp.sql") # then import from the backup run("mysql -u database_user -p -D database_name < ~/tmp/exported_db.sql")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def import_db(import_file):\n import_data(import_file)", "def test_load_database_from_path(tmp_path):\n path = tmp_path / \"test.db\"\n database = load_database(path_or_database=path, fast_logging=False)\n assert isinstance(database, DataBase)\n assert database.path is not None\n assert database.fast_logging is False", "def importToSQLITE(self, db_file, sqlite_db_name):\n\n command = \"{} {} {} {}\".format('cat', db_file, '| sqlite3', sqlite_db_name)\n call(command, shell = True)", "def initdb():\n\tc, conn = connect()\n\tsql = []\n\twith open('data\\\\database.sql') as f:\n\t\tfor line in f:\n\t\t\tsql.append(line.strip())\n\t\n\tfor query in sql:\n\t\tc.execute(query)\n\tconn.commit()", "def load_db(path_to_db):\n db_run = db(path_to_db) # Instantiates the DB by reading the file\n db_run.import_config_db() # Imports configuration DB\n db_run.conn.row_factory = sqlite3.Row # Better select results\n return(db_run)", "def import_file(filepath, db):\n # Logging\n log_main = logging.getLogger(__name__)\n log_import = log_main.getChild('import_files')\n log_import = log_import.getChild(filepath.split('/')[-1])\n log_import.info('started')\n start = time()\n\n # Variables used in data processing\n memory_buff = StringIO()\n curr = None\n cols = ['tweetID', 'date', 'message', 'username', 'userID', 'language',\n 'longitude', 'latitude', 'retweet']\n sql = \"\"\"COPY \"raw_tweets\" (\"tweetID\", \"date\", \"message\", \"username\", \"userID\", \"language\", \"longitude\", \"latitude\", \"retweet\") \n FROM STDIN \n WITH (FORMAT CSV, HEADER TRUE, DELIMITER '\\t');\n \"\"\"\n \n # Try reading the file\n try:\n df = pd.read_csv(filepath, \n usecols=cols, engine='c', \n memory_map=True, low_memory=False,\n dtype={'userID': np.int64, 'tweetID': np.int64})\n except Exception as e:\n log_import.warn('error on read_csv')\n memory_buff.close()\n print (e)\n return\n\n # Attempt to open up a connection to database.\n try:\n connn = db.connect()\n conn = db.raw_connection()\n curr = conn.cursor()\n except (Exception) as e:\n log_import.warn('error on server connection')\n memory_buff.close()\n if curr is not None:\n curr.close()\n print (e)\n return\n\n # Try copying the files to table.\n try:\n # Save to our buffer\n df[cols].to_csv(memory_buff, sep='\\t',\n header=True, index=False, encoding='utf-8')\n\n # Point buffer to start of memory block\n memory_buff.seek(0)\n\n # Copy records using native Postgres COPY command (FAST)\n curr.copy_expert(sql, memory_buff)\n\n # Save transaction and commit to DB\n conn.commit()\n except (Exception) as e:\n log_import.warn('error while copying to database')\n memory_buff.close()\n if curr is not None:\n curr.close()\n print (e)\n return\n finally:\n memory_buff.close()\n if curr is not None:\n curr.close()\n log_import.info('finished ({:.2f})'.format(time() - start))\n return", "def copy_db():\n with cd(\"/tmp\"), lcd(\"/tmp\"):\n sudo(\"pg_dump gsi > /tmp/latest.sql\", user=\"postgres\")\n run(\"tar zcvf latest.sql.tgz latest.sql\")\n get(\"/tmp/latest.sql.tgz\", \"latest.sql.tgz\")\n sudo(\"rm /tmp/latest.sql.tgz /tmp/latest.sql\")", "def createDataBase(mysql,dbase):\n\tsql = 'CREATE DATABASE IF NOT EXISTS '+ dbase + ';'\n\tmysql.query(sql)\n\tmysql.select_db(dbase)\n\tsql = 'DROP TABLE IF EXISTS names;'\n\tmysql.query(sql)\n\tsql = 'DROP TABLE IF EXISTS files;'\n\tmysql.query(sql)\n\tsql = 'DROP TABLE IF EXISTS linesinfile;'\n\tmysql.query(sql)\n\tsql = 'DROP TABLE IF EXISTS allfiles;'\n\tmysql.query(sql)\n\tsql = 'DROP TABLE IF EXISTS 
allnames;'\n\tmysql.query(sql)\n\tsql = 'CREATE TABLE names (id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY, \\\n\tname TINYTEXT NOT NULL);'\n\tmysql.query(sql)\n\tsql = 'CREATE TABLE files (id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY, \\\n\t\tname BIGINT NOT NULL, \\\n\t\tfilename TEXT NOT NULL \\\n\t\tREFERENCES names(id));'\n\tmysql.query(sql)\n\tsql = 'CREATE TABLE linesinfile (id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY, \\\n\t\tfilename BIGINT NOT NULL, \\\n\t\tnumber TEXT NOT NULL, \\\n\t\tref TINYTEXT NOT NULL \\\n\t\tREFERENCES files(id));'\n\tmysql.query(sql) \n\tsql = 'CREATE TABLE allfiles (id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY, \\\n\tfile TEXT NOT NULL);'\n\tmysql.query(sql)\n\tsql = 'CREATE TABLE allnames (id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY, \\\n\t\tfile BIGINT NOT NULL, \\\n\t\tname TEXT NOT NULL \\\n\t\tREFERENCES allfiles(id));'\n\tmysql.query(sql)", "def prepare_db():\n conn = sqlite.connect(\"temp.db\")\n sql = conn.cursor()\n sql.execute(\"SELECT sql FROM sqlite_master WHERE name='points'\")\n rows = sql.fetchall()\n if len(rows) == 0:\n print \"Database does not exist. Creating Database...\"\n sql.execute('''CREATE TABLE points\n (date datetime, humidity real, temp_c real, temp_f real, index_c real, index_f)''')\n print \"Database created\"\n conn.close()", "def load_tables(query_root, data_dir, host, port, db_name, user, password):\n try:\n conn = PGDB(host, port, db_name, user, password)\n try:\n for table in TABLES:\n filepath = os.path.join(data_dir, LOAD_DIR, table.lower() + \".tbl.csv\")\n conn.copyFrom(filepath, separator=\"|\", table=table)\n conn.commit()\n except Exception as e:\n print(\"unable to run load tables. %s\" %e)\n return 1\n conn.close()\n return 0\n except Exception as e:\n print(\"unable to connect to the database. 
%s\" % e)\n return 1", "def load_testdb(c, dbname=\"test_template\", fpath=\"tests/test_db.sql\"):\n default_env = {\n \"PATH\": os.environ[\"PATH\"],\n \"PYTHONPATH\": os.path.abspath(os.path.dirname(__file__)),\n \"LANG\": \"en_US.UTF-8\",\n \"POSTGRES_DB\": dbname,\n \"POSTGRES_HOST\": \"localhost\",\n \"POSTGRES_USER\": \"postgres\",\n \"POSTGRES_PORT\": \"5432\",\n }\n\n env = os.environ\n env.update(default_env)\n\n psql_command = (\n f'psql -h {default_env[\"POSTGRES_HOST\"]} '\n f'-p {default_env[\"POSTGRES_PORT\"]} '\n f'-U {default_env[\"POSTGRES_USER\"]}'\n )\n\n c.run(f'{psql_command} postgres -c \"drop database if exists {dbname}\";', env=env)\n c.run(f'{psql_command} postgres -c \"create database {dbname}\";', env=env)\n c.run(f\"{psql_command} {dbname} < {fpath}\", env=env)\n # update test db to the latest migrations\n c.run(f\"alembic -c ./alembic.ini upgrade head\", env=env)", "def connect_db_and_load_data(cls):\n db.connect()\n db.create_tables([Product], safe=True)\n load_data(transform_data('./inventory.csv'))", "def db():\n\n db_obj = dump_db.DumpDB()\n db_obj.load_from_csv(CONF.BACKUP_DB_PATH)\n return db_obj", "def connect_dataBase(db_dir, create_cmd):\n just_created = False #flag of is the db is already exist\n \n if not os.path.isfile(db_dir):\n #create the db file in the directory\n with open(db_dir , 'w') as f:\n just_created = True\n #print 'database handler created -- ' , db_dir \n\n try:\n conn = lite.connect(db_dir)\n except lite.Error, e:\n #print \"Error %s:\" % db_dir\n sys.exit(1)\n finally:\n if just_created:\n #create the table \n create_dataBase(conn, create_cmd)\n return True", "def tempdb():\n fd, minitwit.app.config['DATABASE'] = tempfile.mkstemp()\n minitwit.init_db()\n try:\n yield\n finally:\n os.close(fd)\n os.unlink(minitwit.app.config['DATABASE'])", "def _create_local_database(db_file_path):\n conn = sql.connect(db_file_path)\n cur = conn.cursor()\n\n table = str('CREATE TABLE app_config ('\n 'ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,'\n 'Name TEXT UNIQUE NOT NULL,'\n 'Value TEXT);')\n cur.execute(table)\n\n table = str('CREATE TABLE menu_data ('\n 'ContextId TEXT PRIMARY KEY NOT NULL,'\n 'Value TEXT);')\n cur.execute(table)\n\n table = str('CREATE TABLE profiles ('\n 'ID INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,'\n 'Guid TEXT NOT NULL UNIQUE,'\n 'IsActive BOOLEAN DEFAULT (0) NOT NULL,'\n 'SortOrder INTEGER NOT NULL);')\n cur.execute(table)\n\n table = str('CREATE TABLE profiles_config ('\n 'Guid TEXT NOT NULL,'\n 'Name TEXT NOT NULL,'\n 'Value TEXT,'\n 'PRIMARY KEY (Guid, Name ),'\n 'FOREIGN KEY (Guid)'\n 'REFERENCES Profiles (Guid) ON DELETE CASCADE ON UPDATE CASCADE);')\n cur.execute(table)\n\n table = str('CREATE TABLE session ('\n 'Name TEXT PRIMARY KEY NOT NULL,'\n 'Value TEXT);')\n cur.execute(table)\n\n table = str('CREATE TABLE settings_monitor ('\n 'Name TEXT PRIMARY KEY NOT NULL,'\n 'Value TEXT);')\n cur.execute(table)\n\n table = str('CREATE TABLE search ('\n 'ID INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,'\n 'Guid TEXT NOT NULL REFERENCES profiles (Guid) ON DELETE CASCADE ON UPDATE CASCADE,'\n 'Type TEXT NOT NULL,'\n 'Value TEXT NOT NULL,'\n 'Parameters TEXT,'\n 'LastAccess TEXT);')\n cur.execute(table)\n\n if conn:\n conn.close()", "def test_init_db(self, tmpdir):\n if ENV_DATABASE in os.environ:\n del os.environ[ENV_DATABASE]\n filename = '{}/my.db'.format(str(tmpdir))\n connect_string = 'sqlite:{}'.format(filename)\n os.environ[ENV_DATABASE] = connect_string\n # Call the init_db method to create all database tables\n 
DatabaseDriver.init_db()\n # Connect to the database and ensure we can run a simple query without\n # and SQL error\n con = DatabaseDriver.connect()\n assert con.execute('SELECT * from team').fetchone() is None\n con.close()", "def load_database(db_session, fixture):\n # TODO: the fixture file path controls\n\n # load the fixture\n datas = pickle.loads(fixture)\n db_session.add_all(datas)\n db_session.commit()\n print \"load database ok\"", "def _load_data(\n data_dir,\n script_dir,\n database: str = \"ibis_testing\",\n **_: Any,\n ) -> None:\n duckdb = pytest.importorskip(\"duckdb\")\n\n schema = (script_dir / 'schema' / 'duckdb.sql').read_text()\n\n conn = duckdb.connect(str(data_dir / f\"{database}.ddb\"))\n for stmt in filter(None, map(str.strip, schema.split(';'))):\n conn.execute(stmt)\n\n for table in TEST_TABLES:\n src = data_dir / f'{table}.csv'\n conn.execute(\n f\"COPY {table} FROM {str(src)!r} (DELIMITER ',', HEADER, SAMPLE_SIZE 1)\"\n )", "def test_load_database_after_pickling(tmp_path):\n path = tmp_path / \"test.db\"\n database = load_database(path_or_database=path, fast_logging=False)\n database = pickle.loads(pickle.dumps(database))\n assert hasattr(database.engine, \"connect\")", "def setup_db(filepath, tables=(), reset=False):\n \n if os.path.exists(filepath) and not reset:\n return\n \n if os.path.exists(filepath) and reset:\n os.remove(filepath)\n \n # create table with appropriate columns\n with get_conn(filepath) as conn:\n for tab in tables:\n make_table(conn, tab.name,\n tab.text_fields, tab.real_fields)", "def initdb():\n db = getdb()\n\n with open(os.path.join(config.BASE_DIRECTORY, 'schema.sql')) as f:\n db.executescript(f.read())", "def iDb(self):\n try:\n self.db.importDb()\n self.accept()\n except PermissionError:\n self.reject()", "def init_db():\n # with current_app.open_resource(\"schema.sql\") as f:\n # db.executescript(f.read().decode(\"utf8\"))\n print(\"初始化数据库脚本文件!!!\")", "def init_db():\n db = get_db()\n cur = db.cursor()\n ##读取SQL文件,获得sql语句的list\n with open(file='./flaskr/schema.sql', mode='r+') as f:\n sql_list = f.read().split(';')[:-1] # sql文件最后一行加上;\n sql_list = [x.replace('\\n', ' ') if '\\n' in x else x for x in sql_list] # 将每段sql里的换行符改成空格\n ##执行sql语句,使用循环执行sql语句\n for sql_item in sql_list:\n # print (sql_item)\n cur.execute(sql_item)", "def import_datafile(db, infile):\n res = stat(infile)\n mtime = datetime.utcfromtimestamp(res.st_mtime)\n\n hash = md5hash(infile)\n\n data_file = db.model.data_file\n\n # Should maybe make sure error is not set\n rec = db.get(data_file, hash)\n # We are done if we've already imported\n if rec is not None:\n return False\n\n # Values to insert\n cols = dict(\n file_hash=hash,\n file_mtime=mtime,\n basename=infile.stem,\n csv_data=None)\n\n try:\n cols['csv_data'] = extract_datatable(infile)\n except NotImplementedError as e:\n secho(str(e), fg='red', dim=True)\n\n tbl = data_file.__table__\n sql = (insert(tbl)\n .values(file_path=str(infile), **cols)\n .on_conflict_do_update(\n index_elements=[tbl.c.file_path],\n set_=dict(**cols)))\n db.session.execute(sql)\n return True", "def dburl(\n tmp_path_factory: pytest.TempPathFactory,\n person_data: pandas.DataFrame,\n student_data: pandas.DataFrame,\n school_data: pandas.DataFrame,\n ) -> str:\n path = tmp_path_factory.mktemp('alchemy') / 'test.db'\n url = f'sqlite:///{path.absolute()}'\n connection = sqlalchemy.create_engine(url)\n person_data.to_sql('person', connection, index=False)\n student_data.to_sql('student', connection, index=False)\n 
school_data.to_sql('school', connection, index=False)\n return url", "def copy_db():\n local('ssh %s pg_dump -U djangoproject -c djangoproject | psql djangoproject' % env.hosts[0])", "def init_db():\n with LoggerApi.app_context():\n db = get_db()\n with LoggerApi.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def importDatabase(self):\n db_conn.execute(\"INSERT INTO Fietsenstalling (Naam, Achternaam, Telefoon, FietsNr, PIN) VALUES \"\n \"(?, ?, ?, ?, ?);\", (naamInvoer.get(), achternaamInvoer.get(), telefoonnummerInvoer.get(), FietsNr, pincodeInvoer.get()))\n\n db_conn.commit()", "def start ( r, w, args ):\n sqlLoginInfo = parseArgs ( args )\n sql = sqlLogin ( sqlLoginInfo )\n xml = importXml ( r )\n createTables ( sql )\n importDB ( sql, xml )\n exportXML = exportDB ( sql )\n sql.close ()\n exportXml ( w, exportXML )", "def test_import_process(self):\r\n good_file = self._get_del_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n # now let's do some db sanity checks\r\n self._delicious_data_test()", "def importData():\n #importChallengeDataToDB()\n importTrendingDataToDB()", "def init_db():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def create_db_from_scratch():\n if os.path.isfile('data.db'):\n os.remove('data.db')\n Base.metadata.create_all(engine)", "def create_db():\r\n\r\n try:\r\n os.remove(proc_loc + 'SF_Parking.db')\r\n print(\"Legacy DB deleted\")\r\n except:\r\n pass\r\n disk_engine = create_engine('sqlite:///'+ proc_loc +'SF_Parking.db')\r\n return sqlite3.connect(proc_loc + 'SF_Parking.db')", "def set_local_database():\n load_config()\n db_path = get_db_path_from_config()\n db = LocalDB(db_path, log=None)\n return db", "def init_db():\n\twith closing(connect_db()) as db:\n\t\twith app.open_resource('schema.sql', mode='r') as f:\n\t\t\tdb.cursor().executescript(f.read())\n\t\tdb.commit()", "def _connect(self):\n database_file = get_database_filename()\n if not os.path.isfile(database_file):\n logger.error('No capabilities database found')\n logger.error(\n 'To import a database from xml: a2p2v --importdb <filename>.xml'\n )\n else:\n self.conn = sqlite3.connect(database_file)", "def test_410_000_non_existant_db(self):\n with TDC() as temp_dir:\n file = Path(temp_dir) / 'database.db'\n self.assertFalse(file.exists(),'Database file exists pre test')\n eng = Engine(file)\n con = eng.connect()\n self.assertTrue(file.exists(), 'Database file does not exists post test')", "def init_db():\n with closing(connect_db()) as db:\n with app.open_resource('schema.sql') as fobj:\n db.cursor().executescript(fobj.read())\n db.commit()", "def connect_database(self, db_path=\"/tmp/example.db\"):\n self.db_path = db_path\n self.db_connector = sqlite3.connect(db_path)\n self.db_connector.close()", "def mysqldump():\n run(\"mysqldump -u database_user database_name -p > ~/tmp/exported_db.sql\")", "def init_db(db_name):\n with sqlite3.connect(db_name) as sql_con:\n sql_con.execute(\"CREATE TABLE IF NOT EXISTS monitored (date, base_dir)\")\n sql_con.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS annotated ('index', title, created,\n project, user, comment, tags, accessed, target, fileset, file_path,\n 'group', organism, sample, channel_0, id, base_dir)\"\"\"\n )", "def db_connect(db_name: str = 'corrections.db'):\n db_exist = False\n\n if os.path.exists(db_name):\n db_exist = True\n\n # Connect to database\n conn = 
sqlite3.connect(db_name)\n\n # Create cursor (enables sql commands using the sql method)\n cursor = conn.cursor()\n\n if not db_exist:\n\n # load data from CSV files\n\n # HVL table, simulated with SpekCalc.\n hvl_table = pd.read_csv(os.path.join(os.path.dirname(__file__),\n 'table_data',\n 'HVL_simulated.csv'))\n\n # Backscatter and mu_en/rho quotients, from Benmanhlouf et al.\n ks_table = pd.read_csv(os.path.join(os.path.dirname(__file__),\n 'table_data',\n 'KS_table_concatenated.csv'))\n\n # Measured and approximated patient support table transmission.\n tab_pad_table = pd.read_csv(os.path.join(os.path.dirname(__file__),\n 'table_data',\n 'table_transmission.csv'))\n\n # Table containing lab specific parameters.\n device_info_table = pd.read_csv(os.path.join(os.path.dirname(__file__),\n 'table_data',\n 'device_info.csv'))\n\n # Upload tables to database\n\n hvl_table.to_sql('HVL_simulated', conn,\n if_exists='replace', index=False)\n\n ks_table.to_sql('KS_table_concatenated', conn,\n if_exists='replace', index=False)\n\n tab_pad_table.to_sql('table_transmission', conn,\n if_exists='replace', index=False)\n\n device_info_table.to_sql('device_info', conn,\n if_exists='replace', index=False)\n\n # Commits the current transactions\n conn.commit()\n\n return conn, cursor", "def import_data(self):\n\t\tif not self.log_files or len(self.log_files) ==0:\n\t\t\tprint \"There is no log files need to import into database\"\n\t\telse:\n\t\t\tfor log_file in self.log_files:\n\t\t\t\tdata = self.read_file(log_file)\n\t\t\t\tself.conn.insert(data)", "def setup(self):\n #print \"Creating test database...\"\n files = glob.glob(os.path.join(self.home_dir, 'sqlFiles', '*.sql'))\n for fls in files:\n loc = fls.rfind('/')\n #print(\" \" + fls.replace('.sql', '')[loc + 1:])\n flh = open(fls, 'r')\n curs = self.cursor()\n curs.executescript(flh.read())\n self.commit()\n curs.close()\n flh.close()\n for fls in ['INSERTS', 'TRIGGERS']:\n #print(fls)\n flh = open(os.path.join(self.home_dir, 'sqlFiles', fls), 'r')\n curs = self.cursor()\n curs.executescript(flh.read())\n self.commit()\n curs.close()\n flh.close()", "def init_database():\n\n # The current dir should be the script home\n homedir = os.path.normpath(\n os.path.dirname(\n sys.executable if getattr(sys, 'frozen', False) else\n __file__)) # cx_Freeze compatibility\n os.chdir(homedir)\n\n engine = create_engine(\"sqlite:///data.db\")\n BASE.metadata.bind = engine\n BASE.metadata.create_all()\n\n return engine", "def createDB():\n print(\"::creating db\")\n filepath = confighome+\"config\"\n\n # open config to get credentials for ssh \n with open(filepath,mode='r', encoding='utf-8') as f:\n jconfig = json.load(f)\n creds=jconfig[0]\n\n # ssh in make a directory, initialize it with 'git --bare' \n cmd=\"ssh \"+creds['db']['username']+\"@\"+creds['db']['host']\n cmd_sqrd=\" 'if ! cd swrss_database > /dev/null 2>&1 ; then mkdir swrss_database; cd swrss_database ; fi ; git init --bare ;'\"\n cmd_full=cmd+cmd_sqrd\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)\n if retval != 0:\n print(\"::error encountered. 
Make sure you have stored your remote's info in the config\")\n\n # locally clone the \"db\"\n cmd_full=\"git clone \"+creds['db']['username']+\"@\"+creds['db']['host']+\":swrss_database\"\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)", "def init_db():\n db = get_db()\n with current_app.open_resource('schema.sql') as f:\n db.executescript(f.read().decode('utf8'))", "def read_database(db_path, db_file, *args):\n\n db_filepath = os.path.join(db_path, db_file)\n\n # list to store loaded data\n data_imported = []\n conn = sqlite3.connect(db_filepath)\n\n for data_name in args:\n\n\n info = f'Reading {data_name} from database................'\n print(info, end=\"\")\n data_name_in_db = conn.execute(\n f\"\"\"SELECT name FROM sqlite_master WHERE type='table' \n AND name='{data_name}'; \"\"\").fetchall()\n if data_name_in_db:\n df = pd.read_sql(f\"select * from {data_name}\", con=conn)\n substitute_names(df)\n # revert single column DataFrame to Series\n if 'index' in df.columns:\n df.set_index('index', inplace=True)\n df = df.squeeze('columns')\n data_imported.append(df)\n print('ok')\n else:\n data_imported.append(None)\n print('no data')\n conn.close()\n return data_imported #if len(data_imported)>1 else data_imported[0]", "def init_db():\n db = get_db()\n\n with current_app.open_resource(\"schema.sql\") as f:\n db.executescript(f.read().decode(\"utf8\"))", "def _init_db():\n c = ppc.app().config['PUBLICPRIZE']['DATABASE']\n e = os.environ.copy()\n e['PGPASSWORD'] = c['postgres_pass']\n subprocess.call(\n ['createuser', '--host=' + c['host'], '--user=postgres',\n '--no-superuser', '--no-createdb', '--no-createrole', c['user']],\n env=e)\n p = subprocess.Popen(\n ['psql', '--host=' + c['host'], '--user=postgres', 'template1'],\n env=e,\n stdin=subprocess.PIPE)\n s = u\"ALTER USER {user} WITH PASSWORD '{password}'\".format(**c)\n enc = locale.getlocale()[1]\n loc = locale.setlocale(locale.LC_ALL)\n p.communicate(input=bytes(s, enc))\n subprocess.check_call(\n ['createdb', '--host=' + c['host'], '--encoding=' + enc,\n '--locale=' + loc, '--user=postgres',\n '--template=template0',\n '--owner=' + c['user'], c['name']],\n env=e)", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def cli_init(dbfile, demo):\n with atomic(dbfile) as cursor:\n create_tables(cursor)\n if demo:\n create_user(cursor, 
'tester', 'pw', '[email protected]')\n create_user(cursor, 'special_tester', 'pw',\n '[email protected]',\n groups=['special'])", "def create_db(self, path: str) -> None:\n if os.path.isfile(path):\n self.db_path = path\n print(\"DB already exists\")\n return\n\n print(path)\n\n self.db_path = path\n\n print(\"Opening the base db\")\n with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'basedb.xml'), 'r') as f:\n base = f.read()\n print(\"Reading the base as {0}\".format(base))", "def create_database_stock_master():\n sql = \"\"\"\n CREATE DATABASE stock_master;\n \"\"\"\n excute_sql(sql,None)", "def import_table_data(con, cur, tbl_name):\n\n # Read schema from external file and create table according to schema\n schemas = import_schemas_from_file()\n tbl_schema = schemas[tbl_name]\n create_table(cur, tbl_name, tbl_schema)\n\n # Loop through CSV file and prepare data for import\n file_records = []\n create_query_str = \"\"\"INSERT INTO {} VALUES {}\"\"\".format(tbl_name, '(' + ','.join(['%s'] * len(tbl_schema)) + ')')\n table_csv_path = CSV_PATH + tbl_name + '.csv'\n\n with open(table_csv_path) as csv_file:\n reader = csv.reader(csv_file, delimiter=',')\n for i, line in enumerate(reader):\n record = [schema_process(tbl_schema, j, item) for j, item in enumerate(line)]\n file_records.append(record)\n # Import records into the MySQL database table, 1,000 records at a time\n if i % 1000 == 0:\n print('inserting 1000 rows')\n cur.executemany(create_query_str, file_records)\n con.commit()\n file_records = []\n # Insert any remaining records.\n print('inserting {} rows'.format(len(file_records)))\n cur.executemany(create_query_str, file_records)\n con.commit()", "def get_db() -> mysql.connector.connection.MySQLConnection:\n username = getenv('PERSONAL_DATA_DB_USERNAME')\n password = getenv('PERSONAL_DATA_DB_PASSWORD')\n host = getenv('PERSONAL_DATA_DB_HOST')\n db = getenv('PERSONAL_DATA_DB_NAME')\n\n conect = mysql.connector.connection.MySQLConnection(\n host=host,\n user=username,\n password=password,\n database=db\n )\n return conect", "def connect_sql(path):\n\n global connection, cursor\n\n connection = sqlite3.connect(path)\n cursor = connection.cursor()\n connection.commit()", "def setup(db):\n global conn, curs\n # normal:\n conn = sqlite3.connect(db) # typical db: '/var/lib/synda/sdt/sdt.db'\n # or test db: '/home/painter/db/sdt.db'\n #curs = conn.cursor() now done at the time of curs.execute() ...\n #...safer to get the cursor when needed, and close it quickly: doesn't lock out other processes", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def create_test_db(filename: str):\n database = sqlite3.connect(filename)\n cursor = database.cursor()\n # Create the tables\n cursor.executescript(open(DATABASE_TABLE_CREATE, \"r\").read())\n # Insert data\n cursor.executescript(open(DATABASE_DATA_CREATE, \"r\").read())\n # Save (commit) the changes\n database.commit()", "def init_db():\n with app.app_context():\n db = connect_db()\n with app.open_resource('schema.sql') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def setup_database():\n from django.core.management import call_command\n from django import setup\n setup()\n call_command('migrate', verbosity=0, interactive=False)\n call_command('loaddata', data('initial_data.json'), verbosity=0, interactive=False)", "def connect_to_database():\r\n\r\n # Attempting a connection to the database\r\n 
try:\r\n temp_connection = sqlite3.connect('Resources/media.db')\r\n except Error:\r\n print(\"Error: Could not connect to the database. Make sure the file \\\"media.db\\\" is located inside the \"\r\n \"\\\"Resources\\\" folder.\")\r\n return False\r\n\r\n if config_var['RUN-MODE']['run_mode'] == \"2\": # Debugging mode\r\n print(\"\\nSuccessfully connected to the database.\")\r\n\r\n # The variable storing the SQL command for creating the \"media\" table\r\n create_table = \"\"\" CREATE TABLE IF NOT EXISTS media (\r\n id integer PRIMARY KEY,\r\n title text NOT NULL,\r\n artist text NOT NULL,\r\n album text,\r\n release_date DATE,\r\n tags TEXT,\r\n mode BIT default 0 NOT NULL,\r\n full_path text NOT NULL UNIQUE\r\n ); \"\"\"\r\n\r\n # Attempting to create the \"media\" table if it doesn't exist already\r\n try:\r\n cursor = temp_connection.cursor()\r\n cursor.execute(create_table)\r\n temp_connection.commit()\r\n cursor.close()\r\n except Error as e:\r\n print(\"Error: Could not create the table.\")\r\n print(e)\r\n return False\r\n\r\n if config_var['RUN-MODE']['run_mode'] == \"2\": # Debugging mode\r\n print(\"\\nThe media table exists or has been created successfully..\")\r\n\r\n # Returning the connection variable so that it can be used by other features of the application\r\n return temp_connection", "def create_db(db_file):\n conn = None\n try:\n conn = sqlite3.connect(db_file)\n print(\"Database created, version = \", sqlite3.version)\n except Error as e:\n print(e)\n finally:\n conn.close()", "def database(db):\n if type(db) is str:\n # Database name\n if db.endswith('.py'):\n # Python source, exec it\n globals = {}\n exec(compile(open(db).read(), db, 'exec'), globals)\n if 'DB' in globals:\n db = globals['DB']\n else:\n storage = globals['Storage']\n from ZODB.DB import DB\n db = DB(storage, cache_size=4000)\n elif db.endswith(\".fs\"):\n from ZODB.DB import DB\n from ZODB.FileStorage import FileStorage\n storage = FileStorage(db)\n db = DB(storage, cache_size=4000)\n\n # The following will fail unless the application has been configured.\n from zope.event import notify\n notify(zope.processlifetime.DatabaseOpened(db))\n\n return db", "def setup(db_file):\n global session\n\n db_conn = \"sqlite:///%s\" % db_file\n logger.info(\"DB Connection: %s\" % db_conn)\n engine = create_engine(db_conn, connect_args={'check_same_thread':False})\n engine.Echo = True\n Base.metadata.create_all(engine)\n\n Session = scoped_session(sessionmaker(bind=engine))\n session = Session()\n print \"DB Connection: %s\" % db_conn", "def connect(filename=DATABASE_FILENAME):\n if not path.exists(filename):\n raise FileNotFoundError(\"Database file not found: \" + filename)\n with open(filename, 'r', encoding=\"utf-8\") as f:\n return Database(json.load(f))", "def _create_db(db_name):\n template_conn.execute('commit')\n template_conn.execute('create database {}'.format(db_name))", "def create_ivr_database(dbname):\n conn = sqlite3.connect(dbname)\n cursor = conn.cursor()\n parent_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n sql_script_path = os.path.join(parent_directory, 'db.sql')\n sql_script_file = open(sql_script_path, 'r')\n sql = sql_script_file.read()\n sql_script_file.close()\n cursor.executescript(sql)\n cursor.close()\n conn.close()", "def create_database():\n db_user = 'expensetracker' # define these\n db_pass = 'beta'\n db_table = 'expensetracker'\n\n local('psql -U postgres -c \"DROP ROLE IF EXISTS %s\"'%db_user)\n local('psql -U postgres -c \"CREATE USER %s WITH 
NOCREATEDB NOCREATEUSER ENCRYPTED PASSWORD E\\'%s\\'\"' % (db_user, db_pass))\n local('psql -U postgres -c \"DROP DATABASE IF EXISTS %s\"'%db_table)\n local('psql -U postgres -c \"CREATE DATABASE %s WITH OWNER %s\"' % (\n db_table, db_user))", "def load_file():\n global list_of_table, data_base, new_data\n open_name = askopenfilename()\n\n if Path(open_name).suffix == '.db':\n data_base = open_name\n data_base = str(data_base)\n new_data_base = parse(data_base)\n new_data = update_list_tables(new_data_base)\n new_data.clear()\n\n else:\n mistake_db_file()", "def setup(db_name = 'net.db', **extra_params):\n global db_run # Imports the DB from the simulator\n \n# # If the file already exists delete it\n if DEBUG: print \"[ pyNN ] : Opening DB\", os.path.abspath(db_name)\n if os.path.exists(db_name):\n if DEBUG: print \"[ pyNN ] : DB already initialized... cleaning up... removing file %s\" % db_name\n os.remove(db_name)\n db_run = db(db_name) # Creates the DB \n db_run.init_db() # Initializes the DB\n return(db_run)", "def main(csvfile, dbfile, verbose=False):\n CONN = sqlite3.connect(dbfile)\n cursor = CONN.cursor()\n create_schema(cursor)\n process_data(cursor, csvfile, verbose=verbose)\n CONN.commit()\n CONN.close()", "def initialise_database():\n with cd(code_dir):\n run(python_add_str + \"python manage.py syncdb --all\")\n run(python_add_str + \"python manage.py migrate --fake\")", "def db_small_path():\n return os.path.join(_here, 'fixtures/databases/db-small/database')", "def structure_and_repopulate_db() -> None:\n with open('db.sql', encoding=\"utf-8\") as f:\n commands = f.read().strip().split(';')\n commands = [command.strip() for command in commands]\n for command in commands:\n my_cursor.execute(command)\n my_db.commit()\n print('Source structure created, data repopulated')", "def test_import_process(self):\r\n good_file = self._get_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n # now let's do some db sanity checks\r\n self._chrome_data_test()", "def init_db():\n # Open connection to the database\n conn = sqlite3.connect(DB_PATH)\n cursor = conn.cursor()\n\n # Open the schema file and execute its SQL code\n with current_app.open_resource('schema.sql') as db_schema:\n cursor.executescript(db_schema.read().decode('utf8'))\n\n # Save (commit) the changes\n conn.commit()\n\n # We can also close the connection if we are done with it.\n conn.close()", "def load_db(db_path):\n return pd.read_csv(db_path)", "def init_new_db(args):\n Base.metadata.drop_all(engine)\n Base.metadata.create_all(engine)\n session = Session()\n session.add(Environment(name='normal', slickurl='http://slicker.homestead-corp.com/slickij', buildurl='?', filename='hs-tcrunij.tar.gz', tcrunijsubdir='hs-tcrunij/tcrunij'))\n session.add(Environment(name='dev', slickurl='http://octomom.homestead-corp.com/slickij', buildurl='?', filename='tcrunij.tar.gz', tcrunijsubdir='tcrunij/tcrunij'))\n session.commit()", "def get_db(file_path):\n db_new = not os.path.isfile(file_path)\n sqlite3_detect_types = sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES\n db = sqlite3.connect(file_path, detect_types=sqlite3_detect_types)\n if db_new:\n create_db(db)\n return db", "def make_db():\n\n db.create_all()", "def load_directory_as_db(self, dir_path, db_name):\n load_dir = os.path.join(self.data_dir, dir_path)\n data_files = glob.glob(os.path.join(load_dir, '*.txt'))\n file_groups = defaultdict(list)\n for path in data_files:\n path_noext, _ = os.path.splitext(path)\n filename_noext = 
os.path.basename(path_noext)\n i = filename_noext.find('-')\n if i == -1:\n table_name = filename_noext\n else:\n table_name = filename_noext[:i]\n file_groups[table_name].append(path)\n\n for table_name in sorted(file_groups.keys()):\n register_name = '{}_{}'.format(db_name, table_name)\n data_files = file_groups[table_name]\n logger.info('REGISTERING {}:{}'.format(register_name, data_files))\n data_files = filter(lambda x: os.path.getsize(x) > 0, data_files)\n if self.load_tables and register_name not in self.load_tables:\n continue\n jdb = self.sql_context.read.json(data_files)\n jdb.printSchema()\n jdb.registerTempTable(register_name)", "def create_db(db_name: str = DB_NAME) -> DBConnection:\n connection = open_db(db_name)\n connection.execute(\"\"\"\n CREATE TABLE docs\n (did INTEGER PRIMARY KEY, \n title TEXT NOT NULL, \n url TEXT NOT NULL)\n \"\"\")\n connection.execute(\"\"\"\n CREATE TABLE tfs \n (did INTEGER,\n term TEXT NOT NULL,\n tf INTEGER)\n \"\"\")\n connection.execute(\"\"\"\n CREATE TABLE boost\n (did INTEGER,\n date INTEGER,\n page INTEGER\n )\"\"\")\n print(f\"[+] Created db {DB_NAME}\")\n return connection", "def setupDbEnv(baseDirPath=None):\n global gDbEnv, gDbDirPath\n\n if not baseDirPath:\n baseDirPath = DATABASE_DIR_PATH\n\n baseDirPath = os.path.abspath(os.path.expanduser(baseDirPath))\n if not os.path.exists(baseDirPath):\n try:\n os.makedirs(baseDirPath)\n except OSError as ex:\n baseDirPath = ALT_DATABASE_DIR_PATH\n baseDirPath = os.path.abspath(os.path.expanduser(baseDirPath))\n if not os.path.exists(baseDirPath):\n os.makedirs(baseDirPath)\n else:\n if not os.access(baseDirPath, os.R_OK | os.W_OK):\n baseDirPath = ALT_DATABASE_DIR_PATH\n baseDirPath = os.path.abspath(os.path.expanduser(baseDirPath))\n if not os.path.exists(baseDirPath):\n os.makedirs(baseDirPath)\n\n gDbDirPath = baseDirPath # set global\n\n gDbEnv = lmdb.open(gDbDirPath, max_dbs=MAX_DB_COUNT)\n # creates files data.mdb and lock.mdb in dbBaseDirPath\n\n # create named dbs (core and tables)\n gDbEnv.open_db(b'core')\n gDbEnv.open_db(b'hid2did') # table of dids keyed by hids\n gDbEnv.open_db(b'did2offer', dupsort=True) # table of offer expirations keyed by offer relative dids\n gDbEnv.open_db(b'anon', dupsort=True) # anonymous messages\n gDbEnv.open_db(b'expire2uid', dupsort=True) # expiration to uid anon\n\n return gDbEnv", "def create_database():\n # Depending on your local settings, you may need to specify a user and password, e.g.\n # conn = psycopg2.connect(dbname=\"postgres\", user=\"postgres\", password=\"password\")\n conn = psycopg2.connect(dbname=\"postgres\")\n conn.autocommit = True # it seems this mode is needed to make a db\n conn.set_isolation_level(0) # also this for dropping db\n\n # un-comment this line if you already have a database called\n # `opportunity_youth` and you want to drop it\n # execute_sql_script(conn, \"01_drop_old_database.sql\")\n execute_sql_script(conn, \"02_create_new_database.sql\")\n\n conn.close()", "def mysql_load(filename):\n fn = os.path.join(SQL_PATH, filename)\n if not os.path.exists(fn):\n raise IOError(\"Can't find file {0}\".format(fn))\n\n print '* Load file {0: <70}'.format(filename),\n sys.stdout.flush()\n if subprocess.call(\n 'mysql -u %s -p\"%s\" < %s' % (\n MYSQL_USER, MYSQL_PASSWORD, fn\n ), shell=True\n ) == 1:\n raise RuntimeError(\"ERROR: Can't load %s\" % filename)\n\n print '[ OK ]'", "def setUp(self):\n self.dbfile = tempfile.NamedTemporaryFile(delete=False)\n self.path = self.dbfile.name\n self.db = Tkvdb(self.path)", "def 
connect_db(file_name):\n db = sqlite3.connect(file_name)\n c = db.cursor()\n return db, c", "def connecting_database():\n try:\n engine = create_engine(\n 'sqlite:///{}{}'.format(path, dbfile),\n echo=False\n )\n return engine\n except Exception as error:\n print(\">> Something wrong with the system!\")\n print(f\">> Error: {error}\")", "def create_db(self, db_name, sql_paths_list=[], db_user='root', db_pass='', db_host='localhost', queue=None):\n\n path = ''\n #makes the list when obtained from command line, backend can pass list directly\n if isinstance(sql_paths_list, str):\n sql_paths_list = sql_paths_list.split(';')\n\n if db_pass:\n cmd = path + 'mysql -u{0} -h{1} -p{2} -e \"create database {3};\"'.format(db_user, db_host, db_pass, db_name)\n else:\n cmd = path + 'mysql -u{0} -h{1} -e \"create database {2};\"'.format(db_user, db_host, db_name)\n\n to_put = []\n out = self.run_task(cmd)\n cmd = 'mysql create database {}'.format(db_name)\n to_put.append([cmd, out.split('\\n')])\n\n for each in sql_paths_list:\n if db_pass:\n cmd = path + 'mysql -u{0} -h{1} -p{2} {3} < '.format(db_user, db_host, db_pass, db_name) + each\n else:\n cmd = path + 'mysql -u{0} -h{1} {2} < '.format(db_user, db_host, db_name) + each\n\n out = self.run_task(cmd)\n cmd = 'mysql execute sql {}'.format(each)\n to_put.append([cmd, out.split('\\n')])\n\n if queue:\n queue.put(to_put)" ]
[ "0.68345016", "0.64033103", "0.6076044", "0.6069602", "0.60656613", "0.6017671", "0.59419686", "0.59211564", "0.5919211", "0.5879916", "0.57659775", "0.5732443", "0.5716429", "0.5715574", "0.5714675", "0.5714328", "0.568365", "0.56806725", "0.56564546", "0.5650345", "0.56437576", "0.56302184", "0.56176937", "0.55936867", "0.558858", "0.5580476", "0.55759877", "0.55394083", "0.55270535", "0.5521717", "0.5513789", "0.55003715", "0.54924995", "0.5483396", "0.5474473", "0.54707694", "0.54621756", "0.5447067", "0.54327166", "0.5428179", "0.54191506", "0.5406435", "0.53956205", "0.53848565", "0.53746355", "0.5370775", "0.53698045", "0.5367137", "0.53661144", "0.53642094", "0.5347662", "0.5346606", "0.5343317", "0.53356516", "0.53356516", "0.53356516", "0.53356516", "0.53356516", "0.53356516", "0.53356516", "0.53328484", "0.53288406", "0.5327686", "0.53262174", "0.5319538", "0.53191966", "0.5317154", "0.5311793", "0.53067356", "0.5303728", "0.52940035", "0.52931494", "0.5289569", "0.52844065", "0.52821684", "0.52810574", "0.5272298", "0.52712554", "0.5269794", "0.5268908", "0.52639115", "0.52610946", "0.5259994", "0.5258378", "0.52567786", "0.5255631", "0.52542853", "0.5253981", "0.52515095", "0.5247364", "0.52436024", "0.5239579", "0.5232598", "0.52287406", "0.5226028", "0.5219646", "0.52178484", "0.5216698", "0.52093047", "0.52068937" ]
0.7754526
0
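
A minimal parameterized sketch of the import helper above, following up on the query's note that it hardwires the username, db name and filename. The argument names and defaults are illustrative assumptions, not part of the original record; the fabric-style run() call and the ~/tmp paths are kept from the original.

from fabric.api import run  # fabric 1.x, as used elsewhere in this corpus

def mysql_import_parameterized(db_user="database_user", db_name="database_name",
                               dump_file="~/tmp/exported_db.sql"):
    # keep a safety copy of the current db before overwriting it
    run("mysqldump -u %s %s -p > ~/tmp/exported_db_temp.sql" % (db_user, db_name))
    # then import from the chosen dump file
    run("mysql -u %s -p -D %s < %s" % (db_user, db_name, dump_file))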
Set up an ssh shortcut. Called by setup_ssh_keys. You can call it separately if desired.
def update_ssh_shortcut(output_keyfile, quickname=None): if quickname: with settings(warn_only=True): local("touch $HOME/.ssh/config") local(r"echo '' >> $HOME/.ssh/config") local(r"echo 'Host %s' >> $HOME/.ssh/config" % quickname) local(r"echo '' >> $HOME/.ssh/config") local(r"echo 'Hostname %s' >> $HOME/.ssh/config" % host_name) local(r"echo 'User %s' >> $HOME/.ssh/config" % user) local(r"echo 'IdentityFile ~/.ssh/%s' >> $HOME/.ssh/config" % output_keyfile) local(r"echo 'ServerAliveCountMax 3' >> $HOME/.ssh/config") local(r"echo 'ServerAliveInterval 10' >> $HOME/.ssh/config")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createPuttyShortcuts(folder = \"Putty Connections\"):\n desktop = winshell.desktop()\n cpath = os.path.join(desktop, folder)\n\n if not os.path.exists(cpath):\n os.mkdir(cpath)\n \n for c in getPuttyConnections():\n if c.strip() != \"\":\n path = os.path.join(cpath, c + \".lnk\")\n target = \"C:\\\\Program Files (x86)\\\\PuTTY\\\\putty.exe\"\n args = \"-load \" + c\n wdir = \"C:\\\\Program Files (x86)\\PuTTY\\\\\"\n try:\n createShortcut(path, target, wdir = wdir, args = args)\n except Exception, e:\n print \"could not create shortcut for \" + c", "def setup_ssh_keys(output_keyfile=\"id_rsa\", ssh_type=\"rsa\", quickname=None):\n with settings(warn_only=True):\n local(\"mkdir -p $HOME/.ssh\")\n with cd(\"$HOME/.ssh\"):\n local(\"ssh-keygen -t %s -f %s\" % (ssh_type, output_keyfile))\n for host in env.hosts:\n local(\"scp %s.pub %s:temp_id_key.pub\" % (output_keyfile, host))\n with settings(warn_only=True):\n run(\"mkdir -p $HOME/.ssh\")\n run(\"cat $HOME/temp_id_key.pub >> ~/.ssh/authorized_keys\")\n run(\"rm $HOME/temp_id_key.pub\")\n run(\"chmod 600 $HOME/.ssh/authorized_keys\")\n run(\"chmod 700 $HOME/.ssh\")\n run(\"chmod go-w $HOME\")\n if quickname:\n update_ssh_shortcut(output_keyfile, quickname)", "def cmd_setup_ssh(public_key_file):\n\n def add_helper(key_file):\n if exists(key_file):\n try:\n fingerprint = str(check_output('ssh-keygen -lf ' + key_file, shell=True)).split(' ', 4)[1]\n key = open(key_file, 'r').read().strip()\n echo(\"Adding key '{}'.\".format(fingerprint), fg='white')\n setup_authorized_keys(fingerprint, PIKU_SCRIPT, key)\n except Exception:\n echo(\"Error: invalid public key file '{}': {}\".format(key_file, format_exc()), fg='red')\n elif public_key_file == '-':\n buffer = \"\".join(stdin.readlines())\n with NamedTemporaryFile(mode=\"w\") as f:\n f.write(buffer)\n f.flush()\n add_helper(f.name)\n else:\n echo(\"Error: public key file '{}' not found.\".format(key_file), fg='red')\n\n add_helper(public_key_file)", "def _start_ssh(self):\n try:\n message = '\\nEnter number you want to connect: '\n num = raw_input(message)\n while not int(num) in self.instance_list:\n num = raw_input(message)\n\n message_user = 'Enter username for ssh_login(blank = %s): ' % DEFAULT_USER \n user = raw_input(message_user)\n if not user:\n user = DEFAULT_USER\n \n target = self.instance_list[int(num)]\n ssh_key_path = os.path.join(SSH_DIR, target['key'])\n if not os.path.exists(ssh_key_path):\n print 'SSH key not found! KEY_PATH[ %s ]' % ssh_key_path\n return\n\n command = COMMAND % {'sshkey' : ssh_key_path, 'user' : user, 'server' : target['dns'], 'port' : self.port}\n\n print 'Connecting to \"%s\"... 
[SSH COMMAND: %s ]' % (target['name'], command)\n os.system(command)\n except KeyboardInterrupt:\n print '\\nAborted!'\n finally:\n sys.exit()", "def open_ssh():\n print('Opening SSH...')", "def __setup_deploy(self):\r\n # Create a SSH Key-pair and push it to the robot\r\n if not self.ssh_key.exists():\r\n subprocess.run(['ssh-keygen',\r\n '-b', '4096',\r\n '-t', 'rsa',\r\n '-f', self.ssh_key,\r\n '-q', '-N', ''\r\n ])\r\n\r\n os.chmod(self.ssh_key, 0o600)\r\n os.chmod(self.ssh_pub, 0o600)\r\n print('Please enter the password if asked.')\r\n subprocess.run(\r\n ['ssh-copy-id',\r\n '-i', self.ssh_key,\r\n 'robot@{}'.format(self.settings['ip'])\r\n ], stderr=open(os.devnull, 'wb'))\r\n print('Try to log into the brick:')\r\n print('\\tssh -i {} robot@{}'.format(self.ssh_key, self.settings['ip']))", "def setup_shortcuts():\n os.system(\"gsettings set org.gnome.shell.extensions.dash-to-dock hot-keys false\")", "def setupSSH(key_rsa_path, key_append_path, key_gen_cmd, HostList):\n # Generate SSH key on localhost\n LocalKey = getLocalKey(key_gen_cmd, key_rsa_path)\n\n # Setup passwordless SSH with each of the specified machines\n for i in HostList:\n if i[0] != 'localhost':\n\n box_ip = i[1]\n user = i[2]\n pwd = i[3]\n\n out = subprocess.Popen(\"echo $\" + user, shell=True,\n stdout=subprocess.PIPE)\n box_user = out.stdout.read().rstrip('\\n')\n out = subprocess.Popen(\"echo $\" + pwd, shell=True,\n stdout=subprocess.PIPE)\n box_pwd = out.stdout.read().rstrip('\\n')\n try:\n\n RemoteKey = getRemoteKey(key_gen_cmd, key_rsa_path, box_ip,\n box_user, box_pwd)\n appendLocalKeyInRemote(LocalKey, key_append_path, box_ip,\n box_user, box_pwd)\n appendRemoteKeyInLocal(RemoteKey, key_append_path, box_ip)\n logging.info(\"Passwordless SSH has been setup b/w \\\n localhost & %s\", box_ip)\n\n except (paramiko.SSHException, paramiko.BadHostKeyException,\n paramiko.AuthenticationException, socket.error) as e:\n logging.info(\"Passwordless SSH setup failed b/w localhost & %s \\\n with %s, please verify host connectivity\", box_ip, e)", "def installShortcutKeys(self):\r\n #TODO: Deal with commented out shortcuts\r\n Key_Escape = 0x01000000 # not in PythonQt\r\n Key_Space = 0x20 # not in PythonQt\r\n self.shortcuts = []\r\n keysAndCallbacks = (\r\n # ('z', self.toolsBox.undoRedo.undo),\r\n # ('y', self.toolsBox.undoRedo.redo),\r\n ('h', self.toggleCrosshair),\r\n (Key_Escape, lambda : self.editor.setActiveEffect(None)),\r\n ('e', lambda : self.editor.setActiveEffect(self.editor.effectByName('Erase'))),\r\n ('p', lambda : self.editor.setActiveEffect(self.editor.effectByName('Paint'))),\r\n ('d', lambda : self.editor.setActiveEffect(self.editor.effectByName('Draw'))),\r\n ('w', lambda : self.editor.setActiveEffect(self.editor.effectByName('Wand'))),\r\n ('r', lambda : self.editor.setActiveEffect(self.editor.effectByName('Rectangle'))),\r\n # (Key_Space, self.toolsBox.toggleFloatingMode),\r\n )\r\n for key,callback in keysAndCallbacks:\r\n shortcut = qt.QShortcut(slicer.util.mainWindow())\r\n shortcut.setKey( qt.QKeySequence(key) )\r\n shortcut.connect( 'activated()', callback )\r\n self.shortcuts.append(shortcut)", "def create_shortcut(startup_path):\n\n startup = startup_path\n path = os.path.join(startup, \"shortcut.lnk\")\n target = os.path.dirname(os.path.dirname(__file__))+str(\"\\LEAP_MyMouse_.exe\")\n icon = os.path.dirname(os.path.dirname(__file__))+str(\"\\\\res\\icons\\leapmymouse.png\")\n\n shell = win32com.client.Dispatch(\"WScript.Shell\")\n shortcut = shell.CreateShortCut(path)\n 
shortcut.Targetpath = target\n shortcut.IconLocation = icon\n shortcut.WindowStyle = 7 # 7 - Minimized, 3 - Maximized, 1 - Normal\n shortcut.save()", "def main():\n # Set these to your own details.\n myssh = connect('example.com')\n myssh.put('ssh.py')\n myssh.close()", "def ssh_setup(existing_key: Optional[Path] = None, force: bool = False):\n\n if not shutil.which(\"ssh\"):\n raise errors.SSHNotFoundError()\n\n system_config = SystemSSHConfig()\n\n include_string = f\"Include {system_config.renku_ssh_root}/*.conf\\n\\n\"\n\n if include_string not in system_config.ssh_config.read_text():\n with system_config.ssh_config.open(mode=\"r+\") as f:\n content = f.read()\n f.seek(\n 0, 0\n ) # NOTE: We need to add 'Include' before any 'Host' entry, otherwise it is included as part of a host\n f.write(include_string + content)\n\n if not existing_key and not force and system_config.is_configured:\n communication.confirm(f\"Keys already configured for host {system_config.renku_host}. Overwrite?\", abort=True)\n\n if existing_key:\n communication.info(\"Linking existing keys\")\n existing_public_key = existing_key.parent / (existing_key.name + \".pub\")\n\n if not existing_key.exists() or not existing_public_key.exists():\n raise errors.KeyNotFoundError(\n f\"Couldn't find private key '{existing_key}' or public key '{existing_public_key}'.\"\n )\n\n if system_config.keyfile.exists():\n system_config.keyfile.unlink()\n if system_config.public_keyfile.exists():\n system_config.public_keyfile.unlink()\n\n os.symlink(existing_key, system_config.keyfile)\n os.symlink(existing_public_key, system_config.public_keyfile)\n else:\n communication.info(\"Generating keys\")\n keys = generate_ssh_keys()\n system_config.keyfile.touch(mode=0o600)\n system_config.public_keyfile.touch(mode=0o644)\n with system_config.keyfile.open(\n \"wt\",\n ) as f:\n f.write(keys.private_key)\n\n with system_config.public_keyfile.open(\"wt\") as f:\n f.write(keys.public_key)\n\n communication.info(\"Writing SSH config\")\n with system_config.jumphost_file.open(mode=\"wt\") as f:\n # NOTE: The * at the end of the jumphost name hides it from VSCode\n content = textwrap.dedent(\n f\"\"\"\n Host jumphost-{system_config.renku_host}*\n HostName {system_config.renku_host}\n Port 2022\n User jovyan\n \"\"\"\n )\n f.write(content)", "def init():\n\n @click.command()\n @click.option('--cell', required=True,\n envvar='TREADMILL_CELL',\n callback=cli.handle_context_opt,\n expose_value=False)\n @click.option('--ssh', help='SSH client to use.',\n type=click.Path(exists=True, readable=True))\n @click.argument('app')\n @click.argument('command', nargs=-1)\n def ssh(ssh, app, command):\n \"\"\"SSH into Treadmill container.\"\"\"\n if ssh is None:\n ssh = _DEFAULT_SSH\n\n if app.find('#') == -1:\n # Instance is not specified, list matching and exit.\n raise click.BadParameter('Specify full instance name: xxx#nnn')\n\n app_discovery = discovery.Discovery(context.GLOBAL.zk.conn, app, 'ssh')\n app_discovery.sync()\n\n # Restore default signal mask disabled by python spawning new thread\n # for Zk connection.\n #\n # TODO: should this be done as part of zkutils.connect?\n for sig in range(1, signal.NSIG):\n try:\n signal.signal(sig, signal.SIG_DFL)\n except OSError:\n pass\n\n # TODO: not sure how to handle mutliple instances.\n for (app, hostport) in app_discovery.items():\n _LOGGER.info('%s :: %s', app, hostport)\n if hostport:\n host, port = hostport.split(b':')\n run_ssh(host, port, ssh, list(command))\n\n return ssh", "def install_ssh(app):\n 
os.system('lxc-attach -n %s -- apk update' % app)\n os.system('lxc-attach -n %s -- apk add openssh' % app)\n # Config sshd\n config = '/var/lib/lxc/%s/rootfs/etc/ssh/sshd_config' % app\n with open(config, \"a\") as myfile:\n myfile.write(\"RSAAuthentication yes\\nPubkeyAuthentication yes\\nPermitRootLogin yes\\nPermitEmptyPasswords yes\")\n os.system('lxc-attach -n %s -- /etc/init.d/sshd start' % app)", "def ssh():\n env['remote_port'] = env['port_map']['22']\n\n sys.stdout.write('Connecting to SSH session on remote port %(remote_port)s\\n' % env)\n\n run('chmod 600 %(pair_private_key)s' % env)\n\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.connect(\n hostname=env['relay_server'],\n port=int(env['remote_port']),\n username=env['pair_user'],\n key_filename=env['pair_private_key']\n )\n\n channel = client.invoke_shell()\n posix_shell(channel)", "def ssh(args, config):\n print('{}'.format(ssh.__doc__))", "def connect_instance(tag, key_name, user_name):\n inst = get_instance(tag)\n cmd = boto.manage.cmdshell.sshclient_from_instance(\n inst,\n SSH_FOLDER + key_name + \".pem\",\n user_name=user_name\n )\n return inst, cmd", "def setupShortcuts(self):\r\n # productive\r\n profprint()\r\n macros = (\r\n (\"Ctrl+Return\", self.segmentNeedle),\r\n (\"Ctrl+z\", self.logic.deleteLastNeedle),\r\n (\"Ctrl+y\", self.acceptNeedleTipEstimate),\r\n (\"Ctrl+n\", self.rejectNeedleTipEstimate),\r\n (\"Ctrl+u\", self.acceptNeedleTipEstimateAsNewTempMarker),\r\n )\r\n\r\n for keys, f in macros:\r\n k = qt.QKeySequence(keys)\r\n s = qt.QShortcut(k, slicer.util.mainWindow())\r\n s.connect('activated()', f)\r\n s.connect('activatedAmbiguously()', f)\r\n print \"'%s' -> '%s'\" % (keys, f.__name__)\r\n # convenient for the python console\r\n globals()['nfw'] = nfw = slicer.modules.NeedleFinderWidget\r\n globals()['nfl'] = nfl = slicer.modules.NeedleFinderWidget.logic\r\n print \"nfl -> NeedleFinderLogic\"\r\n print \"nfw -> NeedleFinderWidget\"", "def start(self):\n keyfile = self._getKeyPath()\n if j.do.getSSHKeyPathFromAgent(\"$(key.name)\", die=False) is None:\n cmd = 'ssh-add %s' % keyfile\n j.do.executeInteractive(cmd)", "def press_on_configure_ssh(driver):\n assert wait_on_element(driver, 5, xpaths.services.ssh_Service_Button, 'clickable')\n driver.find_element_by_xpath(xpaths.services.ssh_Service_Button).click()", "def open(self):\n logging.debug('Connecting to device %s' % self.paramiko_cfg.get('hostname'))\n self.ssh = paramiko.SSHClient()\n self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n self.ssh.connect(**self.paramiko_cfg)", "def setup_authorized_keys(ssh_fingerprint, script_path, pubkey):\n\n authorized_keys = join(environ['HOME'], '.ssh', 'authorized_keys')\n if not exists(dirname(authorized_keys)):\n makedirs(dirname(authorized_keys))\n # Restrict features and force all SSH commands to go through our script\n with open(authorized_keys, 'a') as h:\n h.write(\"\"\"command=\"FINGERPRINT={ssh_fingerprint:s} NAME=default {script_path:s} $SSH_ORIGINAL_COMMAND\",no-agent-forwarding,no-user-rc,no-X11-forwarding,no-port-forwarding {pubkey:s}\\n\"\"\".format(**locals()))\n chmod(dirname(authorized_keys), S_IRUSR | S_IWUSR | S_IXUSR)\n chmod(authorized_keys, S_IRUSR | S_IWUSR)", "def set_shortcut_key(self):\n self.shortcutKey = self.shortcutComboBox2.currentText()\n self.iniSettings.setValue(\"shortcut\", self.shortcutKey)\n self.register_shortcut_listener()\n if self.shortcutKey == \"ESC\":\n self.shortcutKeyHex = 0x1B\n elif self.shortcutKey == \"F1\":\n 
self.shortcutKeyHex = 0x70\n elif self.shortcutKey == \"F2\":\n self.shortcutKeyHex = 0x71\n elif self.shortcutKey == \"F3\":\n self.shortcutKeyHex = 0x72\n elif self.shortcutKey == \"F4\":\n self.shortcutKeyHex = 0x73\n elif self.shortcutKey == \"F5\":\n self.shortcutKeyHex = 0x74\n elif self.shortcutKey == \"F6\":\n self.shortcutKeyHex = 0x75\n elif self.shortcutKey == \"F7\":\n self.shortcutKeyHex = 0x76\n elif self.shortcutKey == \"F8\":\n self.shortcutKeyHex = 0x77\n elif self.shortcutKey == \"F9\":\n self.shortcutKeyHex = 0x78\n elif self.shortcutKey == \"F10\":\n self.shortcutKeyHex = 0x79\n elif self.shortcutKey == \"1\":\n self.shortcutKeyHex = 0x31\n elif self.shortcutKey == \"2\":\n self.shortcutKeyHex = 0x32\n elif self.shortcutKey == \"3\":\n self.shortcutKeyHex = 0x33\n elif self.shortcutKey == \"4\":\n self.shortcutKeyHex = 0x34\n elif self.shortcutKey == \"5\":\n self.shortcutKeyHex = 0x35\n elif self.shortcutKey == \"6\":\n self.shortcutKeyHex = 0x36\n elif self.shortcutKey == \"7\":\n self.shortcutKeyHex = 0x37\n elif self.shortcutKey == \"8\":\n self.shortcutKeyHex = 0x38\n elif self.shortcutKey == \"9\":\n self.shortcutKeyHex = 0x39\n elif self.shortcutKey == \"0\":\n self.shortcutKeyHex = 0x30", "def ssh_cmd(ctx):\n pass", "def setupShortcuts(self):\n\n macros = (\n (\"Ctrl+Return\", self.segmentNeedle),\n (\"Ctrl+z\", self.logic.deleteLastNeedle),\n )\n\n for keys,f in macros:\n k = qt.QKeySequence(keys)\n s = qt.QShortcut(k,slicer.util.mainWindow())\n s.connect('activated()', f)\n s.connect('activatedAmbiguously()', f)\n print \"SlicerRC - '%s' -> '%s'\" % (keys, f.__name__)", "def __init__(self, settings, server=None):\n print(\"SSH Action Handler Started\")\n self.server = server\n self.active_ssh_tasks = {}\n self.key_location = settings[\"ssh_key_location\"]\n self.server_addr = settings[\"ssh_server_addr\"]\n self.server_username = settings[\"ssh_server_username\"]", "def create_shortcut_to_desktop(target,title):\n s = os.path.basename(target)\n fname = os.path.splitext(s)[0]\n winshell.CreateShortcut(Path = os.path.join(winshell.desktop(), fname + '.lnk'),\n Target = target,\n Icon=(target, 0),\n Description=title)", "def ssh_tunnel(self, ssh_tunnel):\n\n self._ssh_tunnel = ssh_tunnel", "def __init__(self,\n comms_address: str,\n args: str = host_utils.DEFAULT_SSH_OPTIONS,\n key_info: Optional[data_types.KeyInfo] = None,\n log_cmd: str = \"\",\n auto_reopen: bool = True,\n open_on_start: bool = True,\n username: str = \"root\"):\n self.comms_address = comms_address\n args = host_utils.generate_ssh_args(\n comms_address,\n log_cmd,\n username,\n options=args,\n key_info=key_info)\n super().__init__(\n command=\"ssh\",\n args=args,\n auto_reopen=auto_reopen,\n open_on_start=open_on_start)", "def openSSH(target, user):\r\n ssh = paramiko.SSHClient()\r\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\r\n ssh.connect(target, username=user)\r\n return ssh", "def test_ssh(self):\n self._test_ssh(self.git_ssh_path)", "def ssh(ssh, app, command):\n if ssh is None:\n ssh = _DEFAULT_SSH\n\n if app.find('#') == -1:\n # Instance is not specified, list matching and exit.\n raise click.BadParameter('Specify full instance name: xxx#nnn')\n\n app_discovery = discovery.Discovery(context.GLOBAL.zk.conn, app, 'ssh')\n app_discovery.sync()\n\n # Restore default signal mask disabled by python spawning new thread\n # for Zk connection.\n #\n # TODO: should this be done as part of zkutils.connect?\n for sig in range(1, signal.NSIG):\n try:\n signal.signal(sig, 
signal.SIG_DFL)\n except OSError:\n pass\n\n # TODO: not sure how to handle mutliple instances.\n for (app, hostport) in app_discovery.items():\n _LOGGER.info('%s :: %s', app, hostport)\n if hostport:\n host, port = hostport.split(b':')\n run_ssh(host, port, ssh, list(command))", "def assign_hotkey(command_name, annotation_str, command_string):\n key_str = None\n if command_name == 'mimic_toggleIkFkMode':\n key_str = pm.textField('t_toggleIkFk', query=True, text=True)\n elif command_name == 'mimic_keyIkFk':\n key_str = pm.textField('t_keyIkFk', query=True, text=True)\n\n if len(key_str) > 1:\n pm.warning('Hotkey must be a single character; no hotkey set')\n return\n\n # Check if the user is in Maya's locked default hotkey set.\n if pm.hotkeySet(query=True, current=True) == 'Maya_Default':\n # If so, try switching to the default Mimic Hotkey Set\n if pm.hotkeySet('Mimic_Hotkeys', exists=True):\n pm.hotkeySet('Mimic_Hotkeys', current=True, edit=True)\n print('Hotkey Set changed to Mimic Hotkeys')\n # If Mimic Hotkey set doesn't exist, prompt the user to create a custom\n # Hotkey set and switch to it.\n else:\n hotkey_set_created = _create_hotkey_set()\n # If the user does not create a new hotkey set, exit the function\n if not hotkey_set_created:\n pm.warning('No custom hotkey profile created; ' \\\n 'No Mimic Hotkey set')\n return\n\n if key_str:\n if pm.runTimeCommand(command_name, exists=True):\n pass\n else:\n pm.runTimeCommand(command_name,\n category='Custom Scripts',\n annotation=annotation_str,\n command=command_string,\n commandLanguage='python')\n\n hotkey_name = command_name + 'Hotkey'\n\n if pm.hotkey(key_str, query=True):\n if pm.hotkey(key_str, query=True, name=True) == hotkey_name:\n print('Hotkey ' \\\n '\\'{}\\' ' \\\n 'already set to ' \\\n '\\'{}\\''.format(key_str, hotkey_name))\n\n else:\n pm.warning('Hotkey ' \\\n '\\'{}\\' ' \\\n 'already in use by another function' \\\n .format(key_str))\n else:\n pm.nameCommand(hotkey_name,\n command=command_name,\n annotation=annotation_str)\n\n pm.hotkey(keyShortcut=key_str,\n name=hotkey_name)\n\n print('{} hotkey set to \\'{}\\' key'.format(command_name, key_str))\n else:\n pm.warning('No key string input; ' \\\n 'input a key string in Mimic UI')\n\n pm.setFocus('prefs_tab_layout')", "def connect(self, driver):\n # 0 1 2\n events = [driver.password_re, self.device.prompt_re, driver.unable_to_connect_re,\n # 3 4 5 6 7\n NEWSSHKEY, KNOWN_HOSTS, HOST_KEY_FAILED, MODULUS_TOO_SMALL, PROTOCOL_DIFFER,\n # 8 9\n driver.timeout_re, pexpect.TIMEOUT]\n\n transitions = [\n (driver.password_re, [0, 1, 4, 5], -1, partial(a_save_last_pattern, self), 0),\n (self.device.prompt_re, [0], -1, partial(a_save_last_pattern, self), 0),\n # cover all messages indicating that connection was not set up\n (driver.unable_to_connect_re, [0], -1, a_unable_to_connect, 0),\n (NEWSSHKEY, [0], 1, partial(a_send_line, \"yes\"), 10),\n (KNOWN_HOSTS, [0, 1], 0, None, 0),\n (HOST_KEY_FAILED, [0], -1, ConnectionError(\"Host key failed\", self.hostname), 0),\n (MODULUS_TOO_SMALL, [0], 0, self.fallback_to_sshv1, 0),\n (PROTOCOL_DIFFER, [0], 4, self.fallback_to_sshv1, 0),\n (PROTOCOL_DIFFER, [4], -1, ConnectionError(\"Protocol version differs\", self.hostname), 0),\n (pexpect.TIMEOUT, [0], 5, partial(a_send, \"\\r\\n\"), 10),\n (pexpect.TIMEOUT, [5], -1, ConnectionTimeoutError(\"Connection timeout\", self.hostname), 0),\n (driver.timeout_re, [0], -1, ConnectionTimeoutError(\"Connection timeout\", self.hostname), 0),\n ]\n\n 
logger.debug(\"EXPECTED_PROMPT={}\".format(pattern_to_str(self.device.prompt_re)))\n fsm = FSM(\"SSH-CONNECT\", self.device, events, transitions, timeout=_C['connect_timeout'],\n searchwindowsize=160)\n return fsm.run()", "def init_hotkeys(self):\n\n\t\tself._interface.init_hotkeys()", "def enable_shortcut_key(self, enable=True):\r\n self.enable_shortcut = enable", "def main():\r\n parser = argparse.ArgumentParser(description=\"\"\"Starts SSH session with one\r\n of ARC\\'s Raspberrypis.\"\"\")\r\n\r\n parser.add_argument('usr', help='Username for the remote device.')\r\n parser.add_argument('pwd', help='Password for [email protected].')\r\n\r\n args = parser.parse_args()\r\n\r\n address = get_IP(IP_list(args.pwd), args.usr)\r\n os.system(\"ssh \" + \"pi\" + \"@\" + address)", "def _AddPerInstanceSshkey(self):\n if self._ssh_public_key_path:\n rsa = self._LoadSshPublicKey(self._ssh_public_key_path)\n logger.info(\"ssh_public_key_path is specified in config: %s, \"\n \"will add the key to the instance.\",\n self._ssh_public_key_path)\n self._metadata[\"sshKeys\"] = \"{0}:{2}\\n{1}:{2}\".format(getpass.getuser(),\n constants.GCE_USER,\n rsa)\n else:\n logger.warning(\n \"ssh_public_key_path is not specified in config, \"\n \"only project-wide key will be effective.\")", "def _ssh(ip, *, user=None, key=None, port=8888):\n # Need to replace \".\", because I don't want \n # `ip` to be a keyword argument\n if ip == \".\" or ip == \"...\": ip = None \n func_args = locals()\n conf = Bunch(**func_args)\n \n # Loads default config if there is one\n # and update the conf object with data\n # from it, but function args have precedence\n fname = os.path.expanduser(\"~/.nbx/aws.json\")\n fname = Path(fname)\n if fname.is_file(): \n stored = load(fname)\n for k,v in stored.items():\n # Function args have precedence\n if conf[k] is None: conf[k] = v\n \n # Check if we got everything we need to\n # connect to instance\n fail = False\n for k in [\"ip\", \"user\", \"key\", \"port\"]:\n if conf[k] is None:\n fail = True\n print(f\"Please provide --{k}\")\n \n # Save what we already got, and\n # proceed if we got everything or return\n dump(conf, fname)\n if fail: return\n \n config_str = SSH_CONFIG_TEMPLATE.format(\n host=\"aws\", \n user=conf.user, \n ip=conf.ip, \n key=conf.key\n )\n print(config_str)\n dump(config_str, os.path.expanduser(\"~/.ssh/ec2_config\"), format=\".txt\")\n \n # We could write some environment vars\n # but we can't source them from here\n #\n # fname = os.path.expanduser(\"~/.nbx/.bash_aws\")\n # string = f\"export xaws={conf.user}@{conf.ip};\\n\"\n # dump(string, fname, format=\".txt\")\n\n # Connect to server and forward local port 8888 to remote port 8888\n # We can now connect to a remote jupyter notebook server via `http://localhost:8888/`\n cmd = f\"ssh -i {conf.key} -L {conf.port}:localhost:{conf.port} {conf.user}@{conf.ip}\"\n os.system(f'bash -c \\\"{cmd}\\\"')", "def _setup_server_connection(self):\n client = paramiko.SSHClient()\n client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n try:\n client.load_system_host_keys()\n except IOError:\n print(\"Could not find ssh host keys.\")\n ssh_known_hosts = input(\"Provide ssh known_hosts key file =\")\n while True:\n try:\n client.load_system_host_keys(str(ssh_known_hosts))\n break\n except IOError:\n print(\"Cannot read file, try again\")\n ssh_known_hosts = input(\"Provide ssh known_hosts key file =\")\n\n server_user = server_setup.get_server_user()\n client.connect(\n server_setup.SERVER_ADDRESS,\n 
username=server_user,\n port=server_setup.SERVER_SSH_PORT,\n timeout=10,\n )\n\n self._ssh = client", "def _connect(self):\n self.client = SSHClient()\n self.client.load_system_host_keys()\n self.client.set_missing_host_key_policy(AutoAddPolicy())\n self.client.connect(self.host,\n username=self.user,\n key_filename=self.filepath,\n look_for_keys=True,\n timeout=5000)\n self.scp = SCPClient(self.client.get_transport())", "def do_shortcuts(self, args):\n result = \"\\n\".join('%s: %s' % (sc[0], sc[1]) for sc in sorted(self.shortcuts))\n self.stdout.write(\"Single-key shortcuts for other commands:\\n{}\\n\".format(result))", "def __call__(self, argv, help):\n from ploy.common import sorted_choices\n parser = argparse.ArgumentParser(\n prog=\"%s fab\" % self.ctrl.progname,\n description=help,\n add_help=False,\n )\n instances = self.ctrl.get_instances(command='init_ssh_key')\n parser.add_argument(\"instance\", nargs=1,\n metavar=\"instance\",\n help=\"Name of the instance from the config.\",\n type=str,\n choices=sorted_choices(instances))\n parser.add_argument(\"fabric_opts\",\n metavar=\"...\", nargs=argparse.REMAINDER,\n help=\"Fabric options\")\n args = parser.parse_args(argv)\n\n instance = instances[args.instance[0]]\n with fabric_integration(self.ctrl, instance, fabcmd=True):\n from fabric.main import main\n fabfile = get_fabfile(instance)\n newargv = ['fab', '-f', fabfile]\n if args.fabric_opts:\n newargv = newargv + args.fabric_opts\n with sys_argv(newargv):\n main()", "def __init__(self, args, shell, userns):\n super(SSHMgr, self).__init__(args, shell, userns)\n parser = MagicArgumentParser()\n parser.add_argument('--host', type=str, default='localhost',\n help='Machine to reach (default = localhost)')\n parser.add_argument('--pid', type=str,\n help='Variable to store SSH process pid')\n _args, cmd = parser.parse_known_args(args)\n self.cmd = self._wlbin + [_args.host, ] + cmd\n # SSH Cannot fork into background without a command to execute.\n # Popen instance is created in submit", "def setup_sshd(self):\n # Update apt repository\n command = 'apt update -y > /dev/null 2>&1'\n if self.debug is True:\n print('Executing apt update -y ')\n try:\n os.system('echo %s| sudo -S %s' % (self.sudo_pw, command))\n except:\n print(\"An error occured during 'apt update -u'\")\n\n # Install ssh package\n command = 'apt install ssh -y > /dev/null 2>&1'\n if self.debug is True:\n print('Executing apt install ssh -y')\n try:\n os.system('echo %s| sudo -S %s' % (self.sudo_pw, command))\n except:\n print(\"An error occured during 'apt install ssh -y' while installing ssh\")\n\n # Configure sshd using the config\n self.config_sshd()\n\n # Reload sshd config\n try:\n command = \"service ssh restart > /dev/null 2>&1\"\n os.system('echo %s| sudo -S %s' % (self.sudo_pw, command))\n print('SSHD_installed and configured successfully, SSHD listening on port {}'.format(self.ssh_port))\n except:\n print('An error occured during ssh \"sudo service ssh reload\" while installing ssh')", "def connect(self, instance):\n client = sshclient.SSHClient()\n client.set_missing_host_key_policy(sshclient.AutoAddPolicy())\n client.connect(instance.ip_address, username=\"core\",\n key_filename=self._ssh_keyfile)\n return client", "def _connect(self):\n self.ssh_conn = paramiko.SSHClient()\n if self.debug:\n self.ssh_conn.log = paramiko.common.logging.basicConfig(\n level=paramiko.common.DEBUG)\n # \"known_hosts\" is ignored, so there's no potential for mismatched keys\n 
self.ssh_conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n # The default for allow_agent (False) breaks SSH to some devices\n self.ssh_conn.connect(self.device, username=self.username,\n password=self.passwd, allow_agent=False)\n self.ssh_shell = self.ssh_conn.invoke_shell()\n self.ssh_shell.set_combine_stderr(True)\n self.ssh_shell.setblocking(True)", "def add_local_ssh_key(self):\n return\n user_ssh_key = open('/home/<$user>/.ssh/id_rsa.pub').read()\n key = digitalocean.SSHKey(token=self.secret_token,\n name='machine-name',\n public_key=user_ssh_key)\n key.create()", "def __ssh_tunnel(self):\n\n host = self.sshTunnelDict[\"ssh_ip\"]\n user = self.sshTunnelDict[\"ssh_user\"]\n password = self.sshTunnelDict[\"ssh_password\"]\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n tunnel_command = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -C -N -f -L 3306:{0} {1}@{2}'.format(sfcs, user, host)\n retry = 5\n while retry:\n if not self.__check_ssh():\n try:\n ssh_tunnel = pexpect.spawn(tunnel_command)\n ssh_tunnel.expect('password:')\n time.sleep(0.1)\n ssh_tunnel.sendline(password)\n ssh_tunnel.expect(pexpect.EOF)\n retry -= 1\n except:\n raise Exception(\"Create SSH Tunnel Failed: retry 5\")\n else: break", "def open(self):\n class IgnorePolicy(paramiko.MissingHostKeyPolicy):\n \"\"\"\n Policy for ignoring missing host keys.\n\n TODO: It would be better to know and confirm the host key.\n \"\"\"\n\n def missing_host_key(self, client, hostname, key):\n return\n\n client = paramiko.SSHClient()\n client.set_missing_host_key_policy(IgnorePolicy())\n client.connect(\n hostname=self._ssh_config.ip,\n username=self._ssh_config.user,\n pkey=paramiko.rsakey.RSAKey.from_private_key(io.StringIO(self._ssh_config.key))\n )\n\n self._paramiko_ssh_client = client", "def init_keystrokes(self):\n import x84.bbs.session\n term = x84.bbs.session.getterminal()\n self.keyset['home'].append(term.KEY_HOME)\n self.keyset['end'].append(term.KEY_END)\n self.keyset['pgup'].append(term.KEY_PGUP)\n self.keyset['pgdown'].append(term.KEY_PGDOWN)\n self.keyset['up'].append(term.KEY_UP)\n self.keyset['down'].append(term.KEY_DOWN)\n self.keyset['down'].append(term.KEY_ENTER)\n self.keyset['exit'].append(term.KEY_ESCAPE)", "def ssh(self, new=False, waitUp=True):\n if new:\n return Ssh(self, \"\".join([self.username, '@', self.url]))\n\n if self._ssh:\n return self._ssh\n\n self._ssh = Ssh(self, \"\".join([self.username, '@', self.url]))\n return self._ssh", "async def configure_ssh_proxy(self, application, task=None):\n debug(\"Configuring ssh proxy for {}\".format(application))\n\n mgmtaddr = self.get_container_ip(\n self.state[application]['container'],\n )\n\n debug(\n \"Setting ssh-hostname for {} to {}\".format(\n application,\n mgmtaddr,\n )\n )\n\n await self.n2vc.ExecutePrimitive(\n self.ns_name,\n application,\n \"config\",\n None,\n params={\n 'ssh-hostname': mgmtaddr,\n 'ssh-username': 'ubuntu',\n }\n )\n\n return True", "def _activate_ssh_coordinator(self, coordinator_constructor):\n self._has_ssh_devices = True\n self._ssh_coord = coordinator_constructor(self)\n\n return", "def add_ssh_key(self, user_id, title, ssh_key):\n _gu = self.get_user(user_id)\n if _gu is None:\n return None\n\n # build URL and make request\n return self._post(\n '/users/{0}/keys'.format(_gu['id']),\n data={'title': title, 'key': ssh_key},\n )", "def ssh(host_=None):\n run_command_on_selected_server(open_shell, host_=host_)", "def Run(self, args):\n self.scp_executable = files.FindExecutableOnPath('scp')\n 
self.ssh_executable = files.FindExecutableOnPath('ssh')\n self.ssh_keygen_executable = files.FindExecutableOnPath('ssh-keygen')\n if (not self.scp_executable or\n not self.ssh_executable or\n not self.ssh_keygen_executable):\n raise exceptions.ToolException('Your platform does not support OpenSSH.')\n\n self.ssh_key_file = os.path.realpath(os.path.expanduser(\n args.ssh_key_file or constants.DEFAULT_SSH_KEY_FILE))", "def keyShortcuts(self):\n\n # Open file: CTRl+O\n self.openVideoSc = QShortcut(QKeySequence('Ctrl+O'), self)\n self.openVideoSc.activated.connect(self.open_video)\n\n # Quit: CTRl+Q\n self.quitSc = QShortcut(QKeySequence('Ctrl+Q'), self)\n self.quitSc.activated.connect(self.close)\n\n # Open annotation: CTRl+I\n self.openAnnotationSc = QShortcut(QKeySequence('Ctrl+I'), self)\n self.openAnnotationSc.activated.connect(self.open_annotation)\n\n # Save annotation: CTRl+S\n self.saveSc = QShortcut(QKeySequence('Ctrl+S'), self)\n self.saveSc.activated.connect(self.save_annotation)\n\n # New file: CTRl+N\n self.resetSc = QShortcut(QKeySequence('Ctrl+N'), self)\n self.resetSc.activated.connect(self.new_file)\n\n # Clear annotation: CTRl+C\n self.resetSc = QShortcut(QKeySequence('Ctrl+C'), self)\n self.resetSc.activated.connect(self.clear_annotation)\n\n # Shortcuts: CTRl+H\n self.resetSc = QShortcut(QKeySequence('Ctrl+H'), self)\n self.resetSc.activated.connect(self.show_help)", "def command():\n server = get_server()\n port = get_port()\n \n click.echo(f'{server.get(\"hostname\")}:{port} -> localhost:{port}')\n click.echo('CTRL+C for quit')\n bash('ssh -N -L {port}:localhost:{port} -i {ssh_key_path} {username}@{hostname}'.format(\n ssh_key_path=server.get('ssh_key_path'),\n username=server.get('username'),\n hostname=server.get('hostname'),\n port=port\n ))", "def connect_to_ssh_host(self, host, port = 22, user = \"omc\", passwd = \"omc\", prompt = \"\", timeout = \"60sec\"):\n if prompt == None or prompt == \"\":\n myprompt = '#'\n # myprompt = None\n else:\n myprompt = prompt\n\n conn = MySshLib(timeout, \"CR\", myprompt)\n conn.open_connection(host, port=port)\n conn.login(user, passwd)\n\n self._ssh_connections[conn] = 'Linux'\n self._current = conn\n self._current._prompt = myprompt\n\n return conn", "def install_proxy(pubkey, command):\n with open(pubkey) as fin:\n keydata = fin.read()\n sshdir = os.path.join(util.get_homedir(), '.ssh')\n authentry = 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding'\n authentry += ',command=\"{}\",from=\"127.0.0.1,::1\" '.format(command)\n authentry += keydata\n authfile = os.path.join(sshdir, 'authorized_keys')\n with open(authfile, 'a') as fout:\n fout.write(authentry)\n os.chmod(authfile, 0o600)", "def fl_set_object_shortcutkey(ptr_flobject, keysym):\n _fl_set_object_shortcutkey = library.cfuncproto(\n library.load_so_libforms(), \"fl_set_object_shortcutkey\",\n None, [cty.POINTER(xfdata.FL_OBJECT), cty.c_uint], \\\n \"\"\"void fl_set_object_shortcutkey(FL_OBJECT * obj,\n unsigned int keysym) \"\"\")\n library.check_if_flinitialized()\n library.verify_flobjectptr_type(ptr_flobject)\n ui_keysym = library.convert_to_uintc(keysym)\n library.keep_elem_refs(ptr_flobject, keysym, ui_keysym)\n _fl_set_object_shortcutkey(ptr_flobject, ui_keysym)", "def do_shortcuts(self, args):\n result = \"\\n\".join('%s: %s' % (sc[0], sc[1]) for sc in sorted(self.shortcuts))\n self.poutput(\"Shortcuts for other commands:\\n{}\\n\".format(result))", "def attach(self):\r\n sshpass = \"sshpass -p \\\"akanksha1\\\"\"\r\n remote_Station = \"[email 
protected]\"\r\n base = \"ssh -t \" + options[\"username\"] + \"@\" + options[\"server\"]\r\n\r\n screen = \" screen -r \"\r\n if self.device_type == \"Wireless_access_point\":\r\n screen += \"WAP_%d\" % self.getID()\r\n elif self.device_type == \"yRouter\":\r\n yrouter = \"yrouter --interactive=1 --config=/root/script_t1_y1.conf test3\"\r\n screen_yrouter = \"%s ssh %s \\\"source /root/.profile; %s\\\"\"%(sshpass, remote_Station, yrouter)\r\n else:\r\n name = self.getName()\r\n pid = mainWidgets[\"tm\"].getPID(name)\r\n if not pid:\r\n return\r\n screen += pid + \".\" + name\r\n\r\n command = \"\"\r\n\r\n window_name = str(self.getProperty(\"Name\")) # the strcast is necessary for cloning\r\n if(self.getName() != window_name):\r\n window_name += \" (\" + self.getName() + \")\"\r\n if environ[\"os\"] == \"Windows\":\r\n\r\n startpath = environ[\"tmp\"] + self.getName() + \".start\"\r\n try:\r\n outfile = open(startpath, \"w\")\r\n outfile.write(screen)\r\n outfile.close()\r\n except:\r\n mainWidgets[\"log\"].append(\"Failed to write to start file!\")\r\n return\r\n\r\n command += \"putty -\"\r\n if options[\"session\"]:\r\n command += \"load \" + options[\"session\"] + \" -l \" + options[\"username\"] + \" -t\"\r\n else:\r\n command += base\r\n command += \" -m \\\"\" + startpath + \"\\\"\"\r\n else:\r\n if self.device_type == \"yRouter\":\r\n command += \"rxvt -T \\\"\" + window_name + \"\\\" -e \" + screen_yrouter\r\n else:\r\n command += \"rxvt -T \\\"\" + window_name + \"\\\" -e \" + base + screen\r\n\r\n self.shell = subprocess.Popen(str(command), shell=True)", "def create_ssh_tunnel():\n \n # Reference link: https://sshtunnel.readthedocs.io/en/latest/\n tunnel = SSHTunnelForwarder(\n (config['ip'], 22),\n ssh_username=config['username'],\n ssh_password=config[\"ssh-password\"],\n remote_bind_address=('localhost', 3306),\n )\n\n tunnel.start() \n print(\"SSH Connected\") \n return tunnel", "def ssh_add_key(self, pub_key_file):\n with open(os.path.normpath(pub_key_file), 'rt') as f:\n ssh_key = f.read()\n if fab.env.user == 'root':\n ssh_dir = '/root/.ssh'\n else:\n if 'home_dir' in fab.env:\n ssh_dir = _('%(home_dir)s/.ssh')\n else:\n ssh_dir = _('/home/%(user)s/.ssh')\n\n remote_os = fab.env.os\n remote_os.mkdir(ssh_dir)\n fab_files.append('%s/authorized_keys' % ssh_dir, ssh_key)\n\n with fab.settings(warn_only=True): # no chmod in system\n remote_os.set_permissions(ssh_dir, pattern='700')\n remote_os.set_permissions('%s/authorized_keys' % ssh_dir,\n pattern='600')", "def _connect(self):\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh.connect(\n self.hostname,\n username=self.user,\n port=self.port,\n pkey=get_pkey(self.issho_conf[\"RSA_ID_PATH\"]),\n )\n return ssh", "def SSH(*args, **kwargs):\n method = import_class(settings.ORCHESTRATION_SSH_METHOD_BACKEND)\n return method(*args, **kwargs)", "def create_shortcuts():\n desktop_path = os.path.expanduser(\"~\") + \"/Desktop/\"\n oxide_path = os.getcwd()\n\n launch_oxide = \"[Desktop Entry]\\nVersion=1.0\\nName=Launch Oxide\\nComment=Launch Oxide Shell\\n\\\n Exec=-e %s/oxide\\nIcon=%s/docs/logos/Oxide_Logo.png\\n\\\n Terminal=true\\nType=Application\\nCategories=Utility;\\nStartupNotify=true\" % (oxide_path, oxide_path)\n\n launch_oxide_root = \"[Desktop Entry]\\nVersion=1.0\\nName=Launch Oxide as Root\\nComment=Launch Oxide Shell as Root\\n\\\n Exec=-e %s/oxide -r\\nIcon=%s/docs/logos/Oxide_Root_Logo.png\\n\\\n 
Terminal=true\\nType=Application\\nCategories=Utility;\\nStartupNotify=true\" % (oxide_path, oxide_path)\n\n launch_oxide_update = \"[Desktop Entry]\\nVersion=1.0\\nName=Update Oxide\\nComment=Launch Oxide Updater\\n\\\n Exec=-e %s/utils/update.sh -r\\nIcon=%s/docs/logos/Oxide_Update_Logo.png\\n\\\n Terminal=true\\nType=Application\\nCategories=Utility;\\nStartupNotify=true\" % (oxide_path, oxide_path)\n\n with open(desktop_path + 'oxide3.desktop', 'w') as f:\n f.write(launch_oxide_update)\n\n with open(desktop_path + 'oxide2.desktop', 'w') as f:\n f.write(launch_oxide_root)\n\n with open(desktop_path + 'oxide1.desktop', 'w') as f:\n f.write(launch_oxide)", "async def _init_jump_host_connection(\n self,\n options: asyncssh.SSHClientConnectionOptions) -> None:\n\n if self._tunnel:\n return\n\n if self.jump_host_key:\n jump_host_options = asyncssh.SSHClientConnectionOptions(\n client_keys=self.jump_host_key,\n login_timeout=self.connect_timeout,\n )\n\n if self.ignore_known_hosts:\n jump_host_options = asyncssh.SSHClientConnectionOptions(\n options=jump_host_options,\n known_hosts=None\n )\n if self.ssh_config_file:\n jump_host_options = asyncssh.SSHClientConnectionOptions(\n options=jump_host_options,\n config_file=[self.ssh_config_file]\n )\n else:\n jump_host_options = options\n\n try:\n if self.jump_host:\n self.logger.info(\n 'Using jump host: {}, with username: {}, and port: {}'\n .format(self.jump_host, self.jump_user, self.jump_port)\n )\n self._tunnel = await asyncssh.connect(\n self.jump_host, port=self.jump_port,\n options=jump_host_options, username=self.jump_user)\n self.logger.info(\n f'Connection to jump host {self.jump_host} succeeded')\n\n except Exception as e:\n if self.sigend:\n await self._terminate()\n return\n self.logger.error(\n f\"ERROR: Cannot connect to jump host: {self.jump_host}, \"\n f\" {str(e)}\")\n self.current_exception = e\n self._conn = None\n self._tunnel = None\n\n return", "def open_key_shortcuts(on_open, *args, **kwargs):\n\n open_menu('/Edit/Key Shortcuts...', on_open, [], args, kwargs)", "def pssh(self, pssh):\n self._pssh = pssh\n return self", "def __enter__(self):\n self.ssh = paramiko.SSHClient()\n self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n self.ssh.connect(self.host, username=self.user, port=self.port, password=self.password)\n return self", "def do_create(env):\n keyname = env.config['proxy']['key_name']\n proxyname = env.config['proxy']['proxy_name']\n keyfile = create_ssh_keypair(keyname, proxyname)\n proxyfile = create_proxy(proxyname)\n install_proxy(keyfile + '.pub', proxyfile)\n test_proxy(keyfile)\n print('Private key created as: `~/.ssh/{}`.'.format(keyname))\n print('Proxy created at: `~/bin/{}`.'.format(proxyname))", "def connectSsh(self):\n connect_handle = pexpect.spawn(\"ssh -q -o StrictHostKeyChecking=no root@%s\" % self.ip)\n connect_handle.setwinsize(800,800)\n connect_handle.logfile_read = sys.stdout\n #connect_handle.logfile_send = sys.stdout\n i = 0\n ssh_newkey = r'(?i)Are you sure you want to continue connecting'\n remote_key_changed = r\"REMOTE HOST IDENTIFICATION HAS CHANGED\"\n\n perm_denied = r\"(?i)Permission denied\"\n while True:\n i = connect_handle.expect([ssh_newkey, 'assword:',self.promptshell,\n pexpect.EOF, pexpect.TIMEOUT,\n remote_key_changed, perm_denied])\n if i==0:\n connect_handle.sendline('yes')\n continue\n elif i==1:\n logger.info(\"Password supplied\")\n connect_handle.sendline(self.password)\n continue\n\t elif i==2:\n self._mode = CLI_MODES.shell\n self._prompt = 
self.promptshell\n break\n elif i==3:\n logger.info(\"Connection closed: %s\" % self)\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Connection Closed: %s\" % self)\n elif i==4:\n logger.warning(\"Timeout while waiting for connection\")\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Unable to establish connection %s\" % self)\n elif i==5:\n logger.warn(\"Removing offending key from .known_hosts..\")\n known_hosts_file = os.path.expanduser(\"~/.ssh/known_hosts\")\n\n if \"darwin\" in sys.platform.lower():\n # MAC OS\n utils.run_cmd(\"sed -i 1 's/%s.*//' %s\" % (self.ip, known_hosts_file))\n elif \"linux\" in sys.platform.lower():\n # Linux\n utils.run_cmd(\"sed -i 's/%s.*//' %s\" % (self.ip, known_hosts_file))\n\n connect_handle = pexpect.spawn(\"ssh root@%s\" % self.ip)\n connect_handle.setwinsize(800,800)\n connect_handle.logfile_read = sys.stdout\n\n continue\n elif i==6:\n logger.warning(\"Permission denied: %s\" % self)\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Permission denied: %s.\" % self)\n return connect_handle", "def ssh(pi):\n command = \"ssh {0}\".format(pi)\n subprocess.Popen(command, shell=True)", "def createShortcut(path, target, wdir = None, icon = None, args = None):\n ext = path[-3:]\n if ext == 'url':\n shortcut = file(path, 'w')\n shortcut.write('[InternetShortcut]\\n')\n shortcut.write('URL=%s' % target)\n shortcut.close()\n elif ext == 'lnk':\n shell = Dispatch('WScript.Shell')\n shortcut = shell.CreateShortCut(path)\n shortcut.Targetpath = target\n if args:\n shortcut.Arguments = args\n if wdir:\n shortcut.WorkingDirectory = wdir\n else: \n shortcut.WorkingDirectory = os.path.dirname(target)\n if icon:\n shortcut.IconLocation = icon\n shortcut.save()\n else:\n raise Exception(\"invalid extension '%s' - allowed values: url | lnk\" % ext)", "def ssh():\n vbox = Vbox(env.vm_name)\n with vbox as session:\n session.wait_for_ssh()\n open_shell()", "def __init__(self, connector=pxssh.pxssh()):\n self.connector = connector\n\n # pxssh.UNIQUE_PROMPT is \"\\[PEXPECT\\][\\$\\#] \", set prompt for csh\n # should not contain slash(\\)\n if isinstance(self.connector, pxssh.pxssh):\n self.connector.PROMPT_SET_CSH = \"set prompt='[PEXPECT]$ '\"\n\n # Echo command result\n self.echo_cmd_result = \"\"", "def __init__(self, globalKey, publicKey, resourceName, **rest):\n super(SshKey, self).__init__({\n \"globalKey\": globalKey,\n \"publicKey\": publicKey,\n \"resourceName\": resourceName,\n }, **rest)", "def install_keys():\n\n # get proxy list from proxylb\n local('scp alpha@proxylb:proxyrotate/proxies.list .')\n if os.path.isfile('proxies.list'):\n for line in open('proxies.list'):\n ip = line.strip().split(',')[0].strip()\n env.host_string = ip\n env.user = 'alpha'\n local('scp id_rsa.pub alpha@%s:' % ip)\n run('cat id_rsa.pub >> .ssh/authorized_keys')", "def __init__(__self__, *,\n admin_username: Optional[pulumi.Input[str]] = None,\n ssh: Optional[pulumi.Input['LinuxProfilePropertiesSshArgs']] = None):\n if admin_username is not None:\n pulumi.set(__self__, \"admin_username\", admin_username)\n if ssh is not None:\n pulumi.set(__self__, \"ssh\", ssh)", "def connect_new_ssh(child, password):\n child.sendline('yes');\n index = child.expect('password: ');\n if index == 0:\n child.sendline(password);", "def __init__(__self__, *,\n admin_username: pulumi.Input[str],\n ssh: pulumi.Input['ContainerServiceSshConfigurationArgs']):\n pulumi.set(__self__, \"admin_username\", 
admin_username)\n pulumi.set(__self__, \"ssh\", ssh)", "def set_ssh_keys(self, ssh_keys):\n self.ssh_keys = {}\n self.ssh_keys_private = {}\n for user_name in ssh_keys:\n key = ssh_keys[user_name]\n if key.startswith('file:'):\n public_key_file = key.split('file:')[1]\n with open(public_key_file) as fd:\n key = fd.read()\n # try to open private key\n private_key_file = public_key_file.split('.pub')[0]\n try:\n with open(private_key_file) as fd:\n self.ssh_keys_private[user_name] = private_key_file\n except FileNotFoundError:\n pass\n\n self.ssh_keys[user_name] = key.strip()\n if user_name == 'root':\n # check if the private key is available:\n # (1) check ssh-agent\n # (2) check for private key file\n command = \"echo {} | ssh-keygen -l -f - | awk '{{ print $2 }}'\"\n finger = check_output(command.format(self.ssh_keys[user_name]),\n shell=True, encoding='ascii')\n try:\n command = 'ssh-add -l | grep -q {}'\n check_call(command.format(finger), shell=True)\n return\n except CalledProcessError:\n if user_name not in self.ssh_keys_private:\n fatal('Could not find matching ssh key for root -',\n 'neither in ssh-agent nor on disk.')", "def create_ssh_keys(self):\n self.random_ssh()\n\n return self.keys", "def _create_ssh_tunnel(self, kernel_channel: KernelChannel, local_port: int, remote_port: int, remote_ip: str,\n server: str, port: int, key: Optional[str] = None):\n channel_name = kernel_channel.value\n self.log.debug(f\"Creating SSH tunnel for '{channel_name}': 127.0.0.1:'{local_port}' \"\n f\"to '{remote_ip}':'{remote_port}'\")\n try:\n process = RemoteProvisionerBase._spawn_ssh_tunnel(local_port, remote_port, remote_ip, server, port, key)\n self.tunnel_processes[channel_name] = process\n except Exception as e:\n self.log_and_raise(RuntimeError(f\"Could not open SSH tunnel for port {channel_name}. 
Exception: '{e}'\"),\n chained=e)", "def _ssh(self, command, use_pwd=True, use_tty=False, forward_x=False, verbose=False):\n if use_pwd:\n cd_cmd = 'cd cluster_test_%d; ' % self.address[1]\n else:\n cd_cmd = ''\n ssh = ['ssh',\n '-o', 'UserKnownHostsFile=/dev/null',\n '-o', 'StrictHostKeyChecking=no',\n '-o', 'IdentitiesOnly=yes']\n if self.key_file:\n ssh.extend(['-i', self.key_file])\n if use_tty:\n ssh.extend(['-t'])\n \n if forward_x:\n ssh.extend(['-Y'])\n \n ssh.extend([self.user_name + '@' + self.address[0], cd_cmd + command])\n \n if verbose: print(\" \".join(ssh))\n \n # Check whether ssh runs successfully.\n if subprocess.call(ssh) == 0:\n return True\n else:\n return False", "def pushkey(self, addr, passwd, keyname=\"\", pubkey=\"\", port=22, login=\"root\"):\n ExecutorSSH(addr, port=port, login=login, passwd=passwd, pushkey=keyname, pubkey=pubkey)", "def hotkey(*args, altModifier: bool=True, autoSave: bool=True, commandModifier: bool=True,\n ctrlModifier: bool=True, ctxClient: Union[AnyStr, bool]=\"\", dragPress: bool=True,\n factorySettings: bool=True, isModifier: bool=True, keyShortcut: AnyStr=\"\", name:\n Union[AnyStr, bool]=\"\", pressCommandRepeat: bool=True, releaseCommandRepeat:\n bool=True, releaseName: Union[AnyStr, bool]=\"\", shiftModifier: bool=True,\n sourceUserHotkeys: bool=True, q=True, query=True, **kwargs)->Union[None, Any]:\n pass", "def test_use_ssh_file_proxyjump():\n connection = FakeBaseConnection(\n host=\"10.10.10.70\",\n port=22,\n username=\"\",\n password=\"secret\",\n use_keys=True,\n allow_agent=False,\n key_file=\"/home/user/.ssh/id_rsa\",\n timeout=60,\n pkey=None,\n passphrase=None,\n disabled_algorithms=None,\n auth_timeout=None,\n conn_timeout=5,\n banner_timeout=10,\n ssh_config_file=join(RESOURCE_FOLDER, \"ssh_config_proxyjump\"),\n sock=None,\n )\n\n connect_dict = connection._connect_params_dict()\n\n expected = {\n \"hostname\": \"10.10.10.70\",\n \"port\": 8022,\n \"username\": \"admin\",\n \"password\": \"secret\",\n \"look_for_keys\": True,\n \"allow_agent\": False,\n \"key_filename\": \"/home/user/.ssh/id_rsa\",\n \"timeout\": 5,\n \"pkey\": None,\n \"passphrase\": None,\n \"disabled_algorithms\": None,\n \"auth_timeout\": None,\n \"banner_timeout\": 10,\n }\n\n result = connection._use_ssh_config(connect_dict)\n assert \"sock\" in result\n assert \"-W\" in result[\"sock\"].cmd\n del result[\"sock\"]\n assert result == expected", "def setup(bot):\n bot.add_cog(JokeCommands(bot))", "def open_connection_ssh():\n\tssh_server = config_basic.config_ssh_server()\n\tssh_username = config_basic.config_ssh_username()\n\tssh_password = config_basic.config_ssh_password()\n\tconnection = SSH(ssh_server, ssh_username, ssh_password)\n\treturn connection", "def establish_connection(self):\r\n\r\n #creates SSH connection and adds SSH key to .known_hosts\r\n self.ssh_conn = paramiko.SSHClient()\r\n self.ssh_conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())\r\n\r\n try:\r\n self.ssh_conn.connect(**self.conn_parm)\r\n print \"Connected to %s\" % self.conn_parm['hostname']\r\n #testing: self.ssh_conn.close()\r\n except socket.error:\r\n print \"Connection Failed on device %s\" % self.conn_parm['hostname']\r\n\r\n #find prompt\r\n open_session = self.ssh_conn.invoke_shell()\r\n output = open_session.recv(1000)\r\n\r\n #testing: print output\r\n\r\n #go into Enable-Mode if not already in it\r\n if '#' not in output:\r\n open_session.send('enable\\n')\r\n time.sleep(1)\r\n open_session.send(self.password)\r\n open_session.send('\\n')\r\n else:\r\n 
print \"In Enable-Mode\"\r\n\r\n #turn off paging\r\n open_session.send('terminal length 0\\n')\r\n time.sleep(3)\r\n \r\n return open_session", "def create_ssh_keypair(keyname, comment):\n sshdir = os.path.join(util.get_homedir(), '.ssh')\n util.create_directory(sshdir, 0o700)\n keyfile = os.path.join(sshdir, keyname)\n if util.try_stat(keyfile):\n raise RuntimeError('~/.ssh/{} already exists'.format(keyname))\n subprocess.check_call(['ssh-keygen', '-f', keyfile, '-N', \"\", '-q', '-C', comment])\n os.chmod(keyfile, 0o600)\n os.chmod(keyfile + '.pub', 0o644)\n return keyfile", "def grant_access(username: str, ssh_key: bytes, ip_address: str, remote_username: str):\n\n create_ssh_key_file(username=username, ssh_key=ssh_key, ip_address=ip_address)\n update_ansible_host_file(username=username, ip_address=ip_address)\n update_ansible_vars(\n remote_username=remote_username, username=username, ip_address=ip_address\n )\n AccessControlModel().grant_access(username=username, ip_addresses=[ip_address])", "def create_ssh_handle(xcnode):\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.set_missing_host_key_policy(paramiko.WarningPolicy)\n\n try:\n client.connect(\n hostname=xcnode.hostname,\n username=xcnode.username,\n password=xcnode.password,\n port=int(xcnode.port)\n )\n xcnode.fd.write('ssh\\'ed to {} @ {}\\n'.format(\n xcnode.hostname, datetime.now()))\n except Exception as e:\n print e\n client = None\n\n xcnode.client = client\n\n return xcnode", "def session_open(self):\n logger.debug(\"entering session_open()\")\n kwargs = {\"hostname\": self.host, \"username\": self.user}\n ssh_client = paramiko.SSHClient()\n ssh_client.load_system_host_keys()\n ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh_config = os.path.expanduser(\"~/.ssh/config\")\n ask_pass = False\n key_found = False\n if (\n os.path.isfile(os.path.expanduser(\"~/.ssh/id_rsa\"))\n or os.path.isfile(os.path.expanduser(\"~/.ssh/id_dsa\"))\n or os.path.isfile(os.path.expanduser(\"~/.ssh/id_ecdsa\"))\n ):\n key_found = True\n\n if os.path.isfile(ssh_config):\n config = paramiko.SSHConfig()\n with open(ssh_config) as open_ssh_config:\n config.parse(open_ssh_config)\n config = config.lookup(self.host)\n if config.get(\"proxycommand\"):\n self._sock = paramiko.proxy.ProxyCommand(config.get(\"proxycommand\"))\n kwargs.update({\"sock\": self._sock})\n\n agent = paramiko.Agent()\n agent_keys = agent.get_keys()\n logger.debug(\"ssh agent has {} keys\".format(len(agent_keys)))\n\n if self.passwd is not None:\n kwargs.update(\n {\"password\": self.passwd, \"allow_agent\": False, \"look_for_keys\": False}\n )\n elif self.user != getpass.getuser():\n print(\n \"skipping publickey ssh auth as {} != {}\".format(\n self.user, getpass.getuser()\n )\n )\n kwargs.update({\"allow_agent\": False, \"look_for_keys\": False})\n ask_pass = True\n elif self.key_filename is not None:\n kwargs.update(\n {\n \"key_filename\": self.key_filename,\n \"allow_agent\": False,\n \"look_for_keys\": False,\n \"password\": None,\n }\n )\n # paramiko is a little broken (see github issue #1664) \n # work around by always asking for passphrase here\n # else \"SSHException: encountered RSA key, expected OPENSSH key\" error\n # when key has passphrase\n passphrase = getpass.getpass(\n prompt=\"ssh key passphrase (Enter for None): \", stream=None\n )\n if passphrase != \"\":\n kwargs.update({\"passphrase\": passphrase})\n elif len(agent_keys) == 0 and not key_found:\n print(\"no ssh keys found, nor ssh agent running, skipping 
publickey ssh auth\")\n kwargs.update({\"allow_agent\": False, \"look_for_keys\": False})\n ask_pass = True\n\n if ask_pass:\n self.passwd = getpass.getpass(\n prompt=\"{}@{}'s password: \".format(self.user, self.host), stream=None\n )\n kwargs[\"password\"] = self.passwd\n\n try:\n ssh_client.connect(**kwargs)\n except PasswordRequiredException:\n passphrase = getpass.getpass(\n prompt=\"ssh key passphrase (Enter for None): \", stream=None\n )\n if passphrase != \"\":\n kwargs.update({\"passphrase\": passphrase})\n ssh_client.connect(**kwargs)\n return ssh_client", "def ssh(self) -> Optional[pulumi.Input['LinuxProfilePropertiesSshArgs']]:\n return pulumi.get(self, \"ssh\")", "def setup(c):\n files.directory(conn, utils.join(SALT_DEPLOY_PATH, utils.DEPLOY_REPO_DIR))\n files.directory(conn, utils.join(SALT_DEPLOY_PATH, utils.DEPLOY_RELEASES_DIR))\n\n with conn.cd(utils.join(SALT_DEPLOY_PATH, utils.DEPLOY_REPO_DIR)):\n if not files.exists(conn, \"HEAD\"):\n conn.run(f\"git clone --mirror --depth 1 --no-single-branch {SALT_REPO} .\")\n\n conn.run(f\"git remote set-url origin {SALT_REPO}\")\n conn.run(f\"git fetch --depth 1 origin {SALT_BRANCH}\")" ]
[ "0.6442464", "0.64414567", "0.6348135", "0.61858404", "0.61858267", "0.6147583", "0.6134668", "0.6058073", "0.59874713", "0.58345175", "0.5792532", "0.5744603", "0.572959", "0.5720658", "0.56933945", "0.56765187", "0.56707656", "0.56636703", "0.55993026", "0.558472", "0.5567214", "0.5554396", "0.5542118", "0.5533366", "0.5524518", "0.55240136", "0.5491325", "0.5479465", "0.54737926", "0.5469327", "0.5431096", "0.54218954", "0.54010874", "0.5400527", "0.5398241", "0.53903717", "0.5380789", "0.5376335", "0.53623885", "0.5360236", "0.5358119", "0.5342202", "0.5338686", "0.53119236", "0.53116107", "0.52951545", "0.5282391", "0.52768713", "0.5275007", "0.52695024", "0.52694434", "0.52560204", "0.52513367", "0.5241311", "0.5236359", "0.52070785", "0.5201358", "0.51847523", "0.51679295", "0.51649535", "0.51621944", "0.5158051", "0.5154262", "0.51526546", "0.51429826", "0.51302433", "0.5123009", "0.51196426", "0.51149535", "0.51072884", "0.509473", "0.5083243", "0.50798166", "0.5075017", "0.5068542", "0.5061606", "0.50609934", "0.5052949", "0.5050629", "0.504202", "0.50406563", "0.5034698", "0.503184", "0.5029465", "0.50255114", "0.50211084", "0.50111043", "0.5008305", "0.5000738", "0.49989977", "0.49958235", "0.49922767", "0.49875897", "0.49867058", "0.49821413", "0.49792394", "0.49730068", "0.4960168", "0.49538442", "0.49520743" ]
0.6833319
0
Generate a new SSH key and deliver it to the server. If quickname is provided, also set up an ssh shortcut. Use this to enable passwordless access to webfaction.
def setup_ssh_keys(output_keyfile="id_rsa", ssh_type="rsa", quickname=None):
    with settings(warn_only=True):
        local("mkdir -p $HOME/.ssh")
    with cd("$HOME/.ssh"):
        local("ssh-keygen -t %s -f %s" % (ssh_type, output_keyfile))
    for host in env.hosts:
        local("scp %s.pub %s:temp_id_key.pub" % (output_keyfile, host))
        with settings(warn_only=True):
            run("mkdir -p $HOME/.ssh")
        run("cat $HOME/temp_id_key.pub >> ~/.ssh/authorized_keys")
        run("rm $HOME/temp_id_key.pub")
        run("chmod 600 $HOME/.ssh/authorized_keys")
        run("chmod 700 $HOME/.ssh")
        run("chmod go-w $HOME")
    if quickname:
        update_ssh_shortcut(output_keyfile, quickname)
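A minimal usage sketch of the task above, assuming Fabric 1.x with `env.hosts` configured; the host string "user@web100.webfaction.com" and the shortcut name "wf" are illustrative placeholders, not values from the dataset:

# Hypothetical invocation (names are assumptions, not part of the record).
from fabric.api import env

env.hosts = ["user@web100.webfaction.com"]  # assumed WebFaction account host

# Generates ~/.ssh/id_rsa locally, appends the public key to each remote
# host's authorized_keys, and (via update_ssh_shortcut) adds a "Host wf"
# entry to ~/.ssh/config so `ssh wf` logs in without a password:
setup_ssh_keys(quickname="wf")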
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_ssh_shortcut(output_keyfile, quickname=None):\n if quickname:\n with settings(warn_only=True):\n local(\"touch $HOME/.ssh/config\")\n local(r\"echo '' >> $HOME/.ssh/config\")\n local(r\"echo 'Host %s' >> $HOME/.ssh/config\" % quickname)\n local(r\"echo '' >> $HOME/.ssh/config\")\n local(r\"echo 'Hostname %s' >> $HOME/.ssh/config\" % host_name)\n local(r\"echo 'User %s' >> $HOME/.ssh/config\" % user)\n local(r\"echo 'IdentityFile ~/.ssh/%s' >> $HOME/.ssh/config\" % output_keyfile)\n local(r\"echo 'ServerAliveCountMax 3' >> $HOME/.ssh/config\")\n local(r\"echo 'ServerAliveInterval 10' >> $HOME/.ssh/config\")", "def ssh_keygen(username):\n d = user_exists(username)\n assert d, fabric.colors.red(\"User does not exist: %s\" % username)\n\n home = d['home']\n if not fabric.contrib.files.exists(os.path.join(home, \".ssh/id_rsa.pub\")):\n fabric.api.run(\"mkdir -p %s\" % os.path.join(home, \".ssh/\"))\n fabric.api.run(\n \"ssh-keygen -q -t rsa -f '%s' -N ''\" %\n os.path.join(\n home, '.ssh/id_rsa'))\n run('chown indabom:indabom {}'.format(\"/home/indabom/.ssh\"))\n run('chown indabom:indabom {}'.format(\"/home/indabom/.ssh/id_rsa\"))\n run('chown indabom:indabom {}'.format(\"/home/indabom/.ssh/id_rsa.pub\"))", "def gen_key(app):\n\tos.system('lxc-attach -n %s -- ssh-keygen -t rsa -N \"\" -f key' % app)", "def pushkey(self, addr, passwd, keyname=\"\", pubkey=\"\", port=22, login=\"root\"):\n ExecutorSSH(addr, port=port, login=login, passwd=passwd, pushkey=keyname, pubkey=pubkey)", "def create_ssh_keypair(keyname, comment):\n sshdir = os.path.join(util.get_homedir(), '.ssh')\n util.create_directory(sshdir, 0o700)\n keyfile = os.path.join(sshdir, keyname)\n if util.try_stat(keyfile):\n raise RuntimeError('~/.ssh/{} already exists'.format(keyname))\n subprocess.check_call(['ssh-keygen', '-f', keyfile, '-N', \"\", '-q', '-C', comment])\n os.chmod(keyfile, 0o600)\n os.chmod(keyfile + '.pub', 0o644)\n return keyfile", "def create_key(name):\n\tinput_data = GPG.gen_key_input(\n\t\tkey_type='RSA',\n\t\tkey_length='1024',\n\t\tname_real='PGP File System',\n\t\tname_comment=create_comment(name),\n\t\tname_email='[email protected]'\n\t)\n\treturn GPG.gen_key(input_data)", "def create_key(name):\n input_data = GPG.gen_key_input(\n key_type='RSA',\n key_length='1024',\n name_real='PGP File System',\n name_comment=create_comment(name),\n name_email='[email protected]'\n )\n return GPG.gen_key(input_data)", "def ssh_keygen(type=\"ed25519\", keysize=None, id_file=\"\", pem=False, derivation_rounds=None, comment=None, force=False, urls=ssh_registration_urls, open_urls_for_existing_file=False):\n if not id_file:\n id_file = path.expanduser(\"~/.ssh/id_{}\".format(type))\n pub_file = id_file + \".pub\"\n \n if path.exists(id_file) and path.exists(pub_file) and not force:\n print(\"SSH key file {} already exists\".format(id_file))\n if not open_urls_for_existing_file:\n return\n else:\n params = [\"ssh-keygen\", \"-t\", type, \"-f\", id_file];\n if keysize:\n params += [\"-b\", str(keysize)]\n if not pem:\n params += [\"-o\"]\n if derivation_rounds is None:\n derivation_rounds = 100\n if derivation_rounds:\n if not pem:\n params += [\"-a\", str(derivation_rounds)]\n else:\n print(\"Using key derivation {} with PEM is not supported\".format(derivation_rounds))\n if comment is not None:\n params += [\"-C\", comment]\n print(\"SSH key file {} does not exist, creating new one with {}, format {} (with {} derivation rounds) and size {}\\n{}\".format(id_file, type, \"PEM\" if pem else \"RFC4716\", 
derivation_rounds or 0, keysize or \"default\", params))\n tools.run(*params)\n \n print(\"Copying SSH key into clipboard\")\n import subprocess\n subprocess.call(\"/usr/bin/pbcopy\", stdin=open(pub_file))\n for url in urls:\n print(\"Opening {}\".format(url))\n tools.run(\"open\", \"https://uberspace.de/dashboard/authentication\")", "def gen_keys_old(name):\n d = 'keys'\n if not os.path.isdir(d):\n os.mkdir(d)\n if not os.path.isfile('%s/%s.pem'%(d,name)):\n open('%s/%s.pem'%(d,name),'w').write(Crypto.PublicKey.RSA.generate(1024,os.urandom).exportKey('PEM'))", "def add_local_ssh_key(self):\n return\n user_ssh_key = open('/home/<$user>/.ssh/id_rsa.pub').read()\n key = digitalocean.SSHKey(token=self.secret_token,\n name='machine-name',\n public_key=user_ssh_key)\n key.create()", "def create_key ():", "def create_ssh_key_file(username: str, ssh_key: bytes, ip_address: str):\n\n if not os.path.exists(\"./ansible/keys\"):\n os.mkdir(\"./ansible/keys\")\n\n with open(f\"./ansible/keys/admin_{ip_address}.pem\", \"w\") as ssh_key_file:\n ssh_key_file.write(ssh_key.decode())\n\n os.system(f\"chmod 400 ./ansible/keys/admin_{ip_address}.pem\")", "def cmd_setup_ssh(public_key_file):\n\n def add_helper(key_file):\n if exists(key_file):\n try:\n fingerprint = str(check_output('ssh-keygen -lf ' + key_file, shell=True)).split(' ', 4)[1]\n key = open(key_file, 'r').read().strip()\n echo(\"Adding key '{}'.\".format(fingerprint), fg='white')\n setup_authorized_keys(fingerprint, PIKU_SCRIPT, key)\n except Exception:\n echo(\"Error: invalid public key file '{}': {}\".format(key_file, format_exc()), fg='red')\n elif public_key_file == '-':\n buffer = \"\".join(stdin.readlines())\n with NamedTemporaryFile(mode=\"w\") as f:\n f.write(buffer)\n f.flush()\n add_helper(f.name)\n else:\n echo(\"Error: public key file '{}' not found.\".format(key_file), fg='red')\n\n add_helper(public_key_file)", "def ssh_public_key(self, key_name: str) -> str:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )", "def create_keypair(key_name):\n if os.path.isfile(SSH_FOLDER + key_name + \".pem\"):\n return # Key already created\n ec2 = boto.ec2.connect_to_region(AWS_REGION)\n key = ec2.create_key_pair(key_name)\n key.save(SSH_FOLDER)", "def upload_public_key():\n log('Adicionando chave publica no servidor', green)\n ssh_file = '~/.ssh/id_rsa.pub'\n target_path = '~/.ssh/uploaded_key.pub'\n put(ssh_file, target_path)\n run('echo `cat ~/.ssh/uploaded_key.pub` >> ~/.ssh/authorized_keys && rm -f ~/.ssh/uploaded_key.pub')", "def do_minerkey(argv):\n\n global PRIVATE_KEY\n\n if not PRIVATE_KEY:\n print(\"Error: private key is missing. 
Use command 'new' to generate key\")\n else:\n PRIVATE_KEY = wallet.get_private_key()\n minerkey = wallet.private_key_to_wif(PRIVATE_KEY, 0, 0)\n file = open(\"data/minerkey\", \"w\")\n file.write(minerkey)\n print(\"Minerkey was created in WIF format and saved to 'data/minerkey'\")\n file.close()", "def download_key():\n data = check_args(('cloudProvider', ))\n provider = jobs.init_provider(data, True)\n key = encrypt_key(provider.get_key(), data['username'])\n return make_response(keyName=provider.keyname, key=key)", "def generate_key(self):\n cmd = self.generate_key_cmd()\n self.show(cmd)\n if self.dryrun:\n return None\n s, _, _ = self.as_user(cmd)\n assert s == 0, ('failed to generate key', cmd)\n keyname = self.extract_key_name()\n return keyname", "def _generateSSHKey(self, private_filepath, public_filepath):\n self.log.debug(\"Writing SSH keys to: \" + private_filepath + \" and \" + public_filepath)\n\n (ssh_dir, filename) = os.path.split(os.path.expanduser(private_filepath))\n if not os.path.exists(ssh_dir):\n self.log.debug(\"SSH Directory doesn't exist, creating \" + ssh_dir)\n os.makedirs(ssh_dir)\n\n key = paramiko.RSAKey.generate(1024)\n key.write_private_key_file(os.path.expanduser(private_filepath))\n \n with open(os.path.expanduser(public_filepath),\"w\") as public:\n public.write(\"%s %s\" % (key.get_name(), key.get_base64()))\n\n public.close()", "def generate_key():\r\n # generating key\r\n key = Fernet.generate_key()\r\n\r\n key_dir = os.path.join(os.path.dirname(__file__), \"resources/key\")\r\n\r\n # writing key in file\r\n with open(key_dir, \"wb\") as keyFile:\r\n keyFile.write(key)", "def ssh_setup(existing_key: Optional[Path] = None, force: bool = False):\n\n if not shutil.which(\"ssh\"):\n raise errors.SSHNotFoundError()\n\n system_config = SystemSSHConfig()\n\n include_string = f\"Include {system_config.renku_ssh_root}/*.conf\\n\\n\"\n\n if include_string not in system_config.ssh_config.read_text():\n with system_config.ssh_config.open(mode=\"r+\") as f:\n content = f.read()\n f.seek(\n 0, 0\n ) # NOTE: We need to add 'Include' before any 'Host' entry, otherwise it is included as part of a host\n f.write(include_string + content)\n\n if not existing_key and not force and system_config.is_configured:\n communication.confirm(f\"Keys already configured for host {system_config.renku_host}. 
Overwrite?\", abort=True)\n\n if existing_key:\n communication.info(\"Linking existing keys\")\n existing_public_key = existing_key.parent / (existing_key.name + \".pub\")\n\n if not existing_key.exists() or not existing_public_key.exists():\n raise errors.KeyNotFoundError(\n f\"Couldn't find private key '{existing_key}' or public key '{existing_public_key}'.\"\n )\n\n if system_config.keyfile.exists():\n system_config.keyfile.unlink()\n if system_config.public_keyfile.exists():\n system_config.public_keyfile.unlink()\n\n os.symlink(existing_key, system_config.keyfile)\n os.symlink(existing_public_key, system_config.public_keyfile)\n else:\n communication.info(\"Generating keys\")\n keys = generate_ssh_keys()\n system_config.keyfile.touch(mode=0o600)\n system_config.public_keyfile.touch(mode=0o644)\n with system_config.keyfile.open(\n \"wt\",\n ) as f:\n f.write(keys.private_key)\n\n with system_config.public_keyfile.open(\"wt\") as f:\n f.write(keys.public_key)\n\n communication.info(\"Writing SSH config\")\n with system_config.jumphost_file.open(mode=\"wt\") as f:\n # NOTE: The * at the end of the jumphost name hides it from VSCode\n content = textwrap.dedent(\n f\"\"\"\n Host jumphost-{system_config.renku_host}*\n HostName {system_config.renku_host}\n Port 2022\n User jovyan\n \"\"\"\n )\n f.write(content)", "def upload_key():\n data = check_args(('cloudProvider', 'key'))\n provider = jobs.init_provider(data, True)\n key = decrypt_key(data['key'], data['username'])\n provider.save_key(key)\n return make_response()", "def create_keypair(self, username):\n msg = \"create_keypair not implemented\"\n raise NotImplementedError(msg)", "def generate_key():\n key = crypto.Key.generate_key()\n click.echo('Private Key (len {}):: \\n{}'.format(\n len(key.get_privkey()),\n hexlify(key.get_privkey())))\n click.echo('Public Key (len {})::\\n{}'.format(\n len(key.get_pubkey()),\n hexlify(key.get_pubkey())))", "def sshkey():\n with settings( hide( 'everything' ), warn_only=True ):\n print ( '\\rChecking %s... 
' % env['host'] ),\n\n try:\n dsa = open( os.getenv('HOME') + '/.ssh/id_dsa.pub', 'r' ).readline().split()\n except IOError as e:\n sys.exit( 'SSH ID file not found' )\n run( 'if [ -d .ssh ]; then true; else mkdir .ssh; fi' )\n exists = run( 'grep \\'%s\\' ~/.ssh/authorized_keys' % dsa[1] )\n if not exists.succeeded:\n run ( 'echo %s %s %s >> ~/.ssh/authorized_keys' % (dsa[0], dsa[1], dsa[2]) )\n print 'SSH key added!'\n else:\n print 'SSH key already present, no update required'", "def new_public_key(self):\n\n option = 'new_public_key'\n _file = self.__get_option(option)\n\n if _file and not os.path.exists(_file) and not os.path.isfile(_file):\n self.log.error(\"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n raise ConfigError('File Error', \"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n else:\n return None", "def create_user_key_file(username: str):\n\n user: User = UserModel().get_user(username=username)\n user_key: Key = user.public_key\n\n public_key: bytes = user_key.public_key\n\n if not os.path.exists(\"./ssh_ca\"):\n os.mkdir(\"./ssh_ca\")\n\n with open(f\"./ssh_ca/{username}.pub\") as public_key_file:\n public_key_file.write(public_key.decode())", "def create_keypair(address_type, addresses_path, address_prefix, name):\n vkey_file = get_vkey_file(addresses_path, address_prefix, name)\n skey_file = get_skey_file(addresses_path, address_prefix, name)\n\n if(path.exists(vkey_file)) :\n print(address_prefix, \"key pair already exists for\", name)\n return\n \n makedirs(path.dirname(vkey_file), mode=0o777, exist_ok=True)\n\n run_params = ['cardano-cli', address_type, 'key-gen', '--verification-key-file', vkey_file, '--signing-key-file', skey_file]\n subprocess_run(run_params, capture_output=False, text=True)\n return", "def makeKey( self, bSerial, sVersion, bNumcam, sMac ):\n\n\t\tbSeed = 0\n\t\tbSeed = self._setSerial( bSeed, bSerial )\n\t\tbSeed = self._setVersion( bSeed, sVersion )\n\t\tbSeed = self._setNumcam( bSeed, bNumcam )\n\t\tbSeed = self._setMac( bSeed, sMac )\n\n\t\tsKey = commands.getoutput( '/usr/local/bin/make-key -s %s' % bSeed )\n\t\tif len( sKey ) != 24:\n\t\t\traise Exception, 'make-key did not return a valid key [%s]' % sKey\n\n\t\treturn sKey", "def install_keys():\n\n # get proxy list from proxylb\n local('scp alpha@proxylb:proxyrotate/proxies.list .')\n if os.path.isfile('proxies.list'):\n for line in open('proxies.list'):\n ip = line.strip().split(',')[0].strip()\n env.host_string = ip\n env.user = 'alpha'\n local('scp id_rsa.pub alpha@%s:' % ip)\n run('cat id_rsa.pub >> .ssh/authorized_keys')", "def do_new(argv):\n\n global PRIVATE_KEY\n\n if not PRIVATE_KEY:\n PRIVATE_KEY = wallet.get_private_key()\n else:\n get_new = yes_or_no(\"Private key already exist, do you want generate new one ?\")\n if get_new:\n PRIVATE_KEY = wallet.get_private_key()\n print(\"Private Key: '\" + PRIVATE_KEY + \"'\")\n cmpr_pub_key = wallet.get_compressed_public_key(PRIVATE_KEY, 1)\n addr = wallet.public_key_to_address(cmpr_pub_key, 0)\n open(\"data/address\", \"w\").write(addr)\n print(\"Public key was saved to 'data/cmpr_pub_key'\")", "def gen_api_key(username):\n salt = str(os.urandom(64)).encode('utf-8')\n return hash_password(username, salt)", "def generate_key(self):\n self.key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(self.key)", "def generate_key_cmd(self, cfg_path=None):\n # TODO: use tempfile\n if cfg_path is None:\n cfg_path = '/tmp/gen-key.cfg'\n 
self.create_gen_key_cfg_file(cfg_path)\n return '/usr/bin/gpg --batch --gen-key {cfg_path}'.format(cfg_path=cfg_path)", "def create_apikey(self, username, api_key):\r\n return 'ApiKey %s:%s' % (username, api_key)", "def private_key():\n return \"Toholampi summer festival 2017 has the most harcore rock bands\"", "def upload_local_public_key(use_poweruser=False,\n local_pub_key_path='~/.ssh/id_rsa.pub'):\n target_user = env.user\n target_home = '.'\n acting_user = env.user\n remote_run = run\n use_sudo = False\n\n if use_poweruser:\n use_sudo = True\n remote_run = sudo\n acting_user = env.poweruser\n # switch to power user to login and create key file\n # (we do not allow unprivileged user login with password)\n with settings(hide('everything'), user=acting_user, warn_only=True):\n target_home = run(\"getent passwd {}\"\n \"|awk -F: '{{print $6}}'\".format(target_user))\n if not exists(target_home):\n print(red(\"User's home directory does not exist\"))\n return\n\n pubkey_path = os.path.expanduser(local_pub_key_path)\n if not os.path.exists(pubkey_path):\n print(red(\"Local public key not found: {}\".format(pubkey_path)))\n return\n\n key = ' '.join(open(pubkey_path).read().strip().split(' ')[:2])\n with settings(user=acting_user), cd(target_home):\n remote_run('mkdir -p .ssh')\n # 'append' with use_sudo duplicates lines within 'cd'.\n # https://github.com/fabric/fabric/issues/703\n # Passing 'shell=True' to append() (which is supported in\n # Fabric 1.6) fixes this issue.\n append('.ssh/authorized_keys', key, partial=True, shell=True,\n use_sudo=use_sudo)\n remote_run('chmod 600 .ssh/authorized_keys')\n remote_run('chmod 700 .ssh')\n remote_run('chown -R {0}:{0} .ssh'.format(target_user))", "def setupSSH(key_rsa_path, key_append_path, key_gen_cmd, HostList):\n # Generate SSH key on localhost\n LocalKey = getLocalKey(key_gen_cmd, key_rsa_path)\n\n # Setup passwordless SSH with each of the specified machines\n for i in HostList:\n if i[0] != 'localhost':\n\n box_ip = i[1]\n user = i[2]\n pwd = i[3]\n\n out = subprocess.Popen(\"echo $\" + user, shell=True,\n stdout=subprocess.PIPE)\n box_user = out.stdout.read().rstrip('\\n')\n out = subprocess.Popen(\"echo $\" + pwd, shell=True,\n stdout=subprocess.PIPE)\n box_pwd = out.stdout.read().rstrip('\\n')\n try:\n\n RemoteKey = getRemoteKey(key_gen_cmd, key_rsa_path, box_ip,\n box_user, box_pwd)\n appendLocalKeyInRemote(LocalKey, key_append_path, box_ip,\n box_user, box_pwd)\n appendRemoteKeyInLocal(RemoteKey, key_append_path, box_ip)\n logging.info(\"Passwordless SSH has been setup b/w \\\n localhost & %s\", box_ip)\n\n except (paramiko.SSHException, paramiko.BadHostKeyException,\n paramiko.AuthenticationException, socket.error) as e:\n logging.info(\"Passwordless SSH setup failed b/w localhost & %s \\\n with %s, please verify host connectivity\", box_ip, e)", "def __setup_deploy(self):\r\n # Create a SSH Key-pair and push it to the robot\r\n if not self.ssh_key.exists():\r\n subprocess.run(['ssh-keygen',\r\n '-b', '4096',\r\n '-t', 'rsa',\r\n '-f', self.ssh_key,\r\n '-q', '-N', ''\r\n ])\r\n\r\n os.chmod(self.ssh_key, 0o600)\r\n os.chmod(self.ssh_pub, 0o600)\r\n print('Please enter the password if asked.')\r\n subprocess.run(\r\n ['ssh-copy-id',\r\n '-i', self.ssh_key,\r\n 'robot@{}'.format(self.settings['ip'])\r\n ], stderr=open(os.devnull, 'wb'))\r\n print('Try to log into the brick:')\r\n print('\\tssh -i {} robot@{}'.format(self.ssh_key, self.settings['ip']))", "def makeKeyV2( self, sSeed, bSerial, sVersion, bNumcam, bPosLock, bLprLock, 
sFeatures, sPosTypes ):\n\n\t\trgs = [\n\t\t\t'/usr/local/bin/make-key',\n\t\t\t'-s', sSeed,\n\t\t\t'-n', str( bNumcam ),\n\t\t\t'-p', str( bPosLock ),\n\t\t\t'-L', str( bLprLock ),\n\t\t\t'-S', str( bSerial ),\n\t\t\t'-V', sVersion,\n\t\t\t'-F', sFeatures,\n\t\t\t'-P', sPosTypes\n\t\t]\n\t\t#dbgMsg( 'Making key with command [%s]' % \" \".join( rgs ) )\n\n\t\toCMD = subprocess.Popen(\n\t\t\trgs,\n\t\t\tstdin=subprocess.PIPE,\n\t\t\tstdout=subprocess.PIPE,\n\t\t\tstderr=subprocess.STDOUT,\n\t\t\tshell=False,\n\t\t\tclose_fds=True\n\t\t)\n\t\tsOutput = oCMD.communicate()[ 0 ]\n\t\tbStatus = oCMD.returncode\n\n\t\t#dbgMsg( 'make-key return value [%d]' % bStatus )\n\t\t#dbgMsg( 'make-key return output [%s]' % sOutput )\n\n\t\tif bStatus != 1:\n\t\t\traise Exception( 'make-key returned bad exit status' )\n\n\t\tsKey = sOutput.strip()\n\n\t\tif len( sKey ) != 39:\n\t\t\traise Exception( 'make-key did not return a valid key [%s]' % sKey )\n\n\t\treturn sKey", "def write_key(key_name):\n key = Fernet.generate_key()\n with open(key_name, \"wb\") as key_file:\n key_file.write(key)", "def generateKeys(self, keys_path, minion_id):\n #Change directory to keys path\n os.chdir(keys_path)\n #Give permission to the salt user\n self.console_manager.printRed(\"Giving permission to the salt user\")\n command = ['sudo', 'chmod', 'a+rwx', '.']\n self.console_manager.runCommandFromShell(command)\n #Generate keys\n self.console_manager.printRed(''.join([\"Generating keys for minion id: \", minion_id]))\n command = ['sudo', 'salt-key', ''.join(['--gen-keys=', minion_id])]\n self.console_manager.runCommandFromShell(command)\n #Give permission to the salt user\n self.console_manager.printRed(\"Allowing vagrant to handle private keys\")\n command = ['sudo', 'chmod', 'a+rwx', ''.join([minion_id, '.pub']), ''.join([minion_id, '.pem'])]\n self.console_manager.runCommandFromShell(command)\n #Add public key to the accepted minion folder\n self.console_manager.printRed(\"Copying the minion public key to the salt master public keys folder\")\n command = ['sudo', 'cp', ''.join([minion_id, '.pub']), ''.join(['/var/lib/salt/pki/master/minions/', minion_id])]\n self.console_manager.runCommandFromShell(command)\n command = ['sudo', 'cp', ''.join([minion_id, '.pub']), ''.join(['/etc/salt/pki/master/minions/', minion_id])]\n self.console_manager.runCommandFromShell(command)\n return", "def generate_key():\n key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(key)", "def generate_key():\n key = Fernet.generate_key()\n with open(\"Secret.key\",\"wb\")as key_file:\n key_file.write(key)", "def infocalypse_genkey(ui_, **opts):\n params, dummy = get_config_info(ui_, opts)\n execute_genkey(ui_, params)", "def do_key(self, args):\n if args == '':\n print('at least one container name should be input')\n return\n args = args.split()\n try:\n position = args.index('-p')\n except ValueError:\n position = -1\n if position >= 0 and position < len(args):\n tmp = args.copy()\n private_key = args[position+1]\n tmp.pop(position)\n tmp.pop(position)\n containers_name = tmp\n else:\n containers_name = args\n container_name = containers_name[0]\n try:\n keys = self._user.change_key(container_name, private_key='')\n print('public key: (copy this to remote machine \"/home/$USER/.ssh/authorized_keys\")')\n print(keys['public_key'])\n print('private key: (copy this to local machine \"/home/$USER/.ssh/id.rsa\", make sure the file umask is 600 with the command `chmod 600 id.rsa`)')\n print(keys['private_key'])\n 
except pylxd.exceptions.LXDAPIException as e:\n print('key: not success, the container\\'s name is: {}, \\nthe error information is: {}'.format(container_name, e))", "def ssh_add_key(self, pub_key_file):\n with open(os.path.normpath(pub_key_file), 'rt') as f:\n ssh_key = f.read()\n if fab.env.user == 'root':\n ssh_dir = '/root/.ssh'\n else:\n if 'home_dir' in fab.env:\n ssh_dir = _('%(home_dir)s/.ssh')\n else:\n ssh_dir = _('/home/%(user)s/.ssh')\n\n remote_os = fab.env.os\n remote_os.mkdir(ssh_dir)\n fab_files.append('%s/authorized_keys' % ssh_dir, ssh_key)\n\n with fab.settings(warn_only=True): # no chmod in system\n remote_os.set_permissions(ssh_dir, pattern='700')\n remote_os.set_permissions('%s/authorized_keys' % ssh_dir,\n pattern='600')", "def generate(self, force=False):\n if not self.check_force_generate(force):\n return False\n\n mkdirs(self.path)\n\n command = [openssl, 'ecparam', '-genkey', '-name', self.asn1_oid, '-out', self.key_file]\n\n self.log.info('Generating EC key')\n # Generate the keyfile with no password\n if not run_command(command):\n raise RuntimeError('EC key generation failed', self)\n\n # Now encrypt the key with a password, overwriting the original\n # passwordless key.\n if self.password:\n command = [\n openssl, 'ec',\n '-in', self.key_file,\n '-out', self.key_file,\n '-des3', '-passout', 'pass:{}'.format(self.password)\n ]\n self.log.info('Encrypting key with password')\n\n if not run_command(command):\n raise RuntimeError('EC key file password encryption failed')\n\n if not self.exists():\n raise RuntimeError(\n 'Key generation succeeded but key file does not exist. '\n 'This should not happen', self\n )", "def generate_key():\n key = Fernet.generate_key()\n with open(\"pass.key\", \"wb\") as key_file:\n key_file.write(key)", "def util_generate_key(conf_file=None):\n keyname = DebRepo(**config(conf_file=conf_file)).generate_key()\n print(keyname)", "def keygen():\n pk, pub = generate_signing_key()\n t = PrettyTable([\"Private (install on your witness node)\",\n \"Public (publish with 'conductor enable' command)\"])\n t.align = \"l\"\n t.add_row([pk, pub])\n\n output(t, '')", "def create_ssh_keys(self):\n self.random_ssh()\n\n return self.keys", "def gen_temp_key(self, keysize=1024):\n self.temp_session_key = [None, None]\n self.key_exchange_gui.generating_temp_key()\n return self.gen_key_pair(keysize, self.gen_temp_key_cb)", "def public_key(self):", "def setup_authorized_keys(ssh_fingerprint, script_path, pubkey):\n\n authorized_keys = join(environ['HOME'], '.ssh', 'authorized_keys')\n if not exists(dirname(authorized_keys)):\n makedirs(dirname(authorized_keys))\n # Restrict features and force all SSH commands to go through our script\n with open(authorized_keys, 'a') as h:\n h.write(\"\"\"command=\"FINGERPRINT={ssh_fingerprint:s} NAME=default {script_path:s} $SSH_ORIGINAL_COMMAND\",no-agent-forwarding,no-user-rc,no-X11-forwarding,no-port-forwarding {pubkey:s}\\n\"\"\".format(**locals()))\n chmod(dirname(authorized_keys), S_IRUSR | S_IWUSR | S_IXUSR)\n chmod(authorized_keys, S_IRUSR | S_IWUSR)", "def _gen_key(self):\n\n input_data = self._gpg.gen_key_input(key_type=\"RSA\",\n key_length=self.key_length, name_real=self.name,\n name_comment=self.comment, name_email=self.email)\n\n log.info(\"Generating key: (%s)\" % input_data)\n\n self.key = self._gpg.gen_key(input_data)", "def start(self):\n keyfile = self._getKeyPath()\n if j.do.getSSHKeyPathFromAgent(\"$(key.name)\", die=False) is None:\n cmd = 'ssh-add %s' % keyfile\n j.do.executeInteractive(cmd)", 
"def ask_keyshare(self, prompt=None, key_type=None):\n\n if prompt is None:\n prompt = 'Now please enter your key share in hex-coded form: \\n'\n\n # old way:\n # key_share = raw_input(question).strip().upper()\n\n key_len = None\n if key_type is not None:\n key_len = key_type.key_len\n\n key_share = None\n hex_alphabet = [ord(x) for x in '0123456789abcdefABCDEF']\n err_y, err_x = 5, 2\n screen_wrapper = curses_screen()\n\n # initializes curses for dialog\n with screen_wrapper as win:\n win.addstr(0, 0, prompt)\n win.refresh()\n\n # Create window just for the key entry.\n # if key length is given - compute number of characters needed.\n data_length = key_len * 2 if key_len is not None else None\n max_width = self.get_term_width()\n\n w_offset = 0\n w_cols = max_width\n w_rows = 2\n\n if key_len is not None:\n w_rows = 1\n row_needed = math.ceil(data_length/4.0) * 5.0\n if row_needed > max_width:\n w_rows = int(math.ceil(row_needed / float(max_width)))\n\n win_key = curses.newwin(w_rows, w_cols, 3, w_offset)\n keybox = KeyBox(win_key, True)\n keybox.auto_format = True\n keybox.max_input_len = data_length\n\n if not self.hide_key:\n keybox.hide_input = False\n\n # editing routine\n error_shown = False\n while 1:\n ch = keybox.win.getch()\n if not ch:\n continue\n\n # Clear old error\n if error_shown:\n win.move(err_y, err_x)\n win.clrtoeol()\n win.refresh()\n keybox.goto_last()\n error_shown = False\n\n # Allow only hex characters\n if curses.ascii.isprint(ch) and ch not in hex_alphabet:\n continue\n\n # Allow finishing only if entering all characters\n if ch == curses.ascii.NL and key_type is not None:\n tmp_share = keybox.collect_buffer()\n tmp_share = tmp_share.strip().upper().replace(' ', '')\n try:\n tmp_share = key_type.process_key(tmp_share)\n except:\n pass\n\n if len(tmp_share) != key_len*2:\n try:\n win.addstr(err_y, err_x, 'Error: key size is invalid', screen_wrapper.get_red_attr())\n win.refresh()\n keybox.goto_last()\n error_shown = True\n except Exception as e:\n logger.error('curses error exception: %s' % e)\n continue\n\n if not keybox.do_command(ch):\n break\n\n keybox.win.refresh()\n\n key_share = keybox.collect_buffer()\n key_share = key_share.strip().upper().replace(' ', '')\n key_share_fix = key_share\n if key_type is not None:\n key_share_fix = key_type.process_key(key_share)\n\n return key_share, key_share_fix", "def install_secret_key(app, filename='secret_key'):\n filename = os.path.join(app.instance_path, filename)\n\n try:\n app.config['SECRET_KEY'] = open(filename, 'rb').read()\n except IOError:\n print('Error: No secret key. 
Create it with:')\n full_path = os.path.dirname(filename)\n if not os.path.isdir(full_path):\n print('mkdir -p {filename}'.format(filename=full_path))\n print('head -c 24 /dev/urandom > {filename}'.format(filename=filename))\n sys.exit(1)", "def create(self, name, public_key=None):\n data = {\n \"keypair\": {\n \"name\": name\n }\n }\n if public_key is not None:\n data['keypair']['public_key'] = public_key\n \n path = '/os-keypairs'\n res = self.client.call(path, 'POST', data=json.dumps(data), \n token=self.manager.identity.token)\n self.logger.debug('Create/import openstack keypair: %s' % truncate(res))\n return res[0]['keypair']", "def newKeyGenerate():\n generate()\n return '', 204", "def adminGetUserKey(name):\n keys = hl.getUser(\"Name\",name)[\"Keys\"]\n #If on a production server, use actual path\n if os.path.isdir(keys_dir):\n filename = keys_dir + keys + '.ovpn' \n #if not os.path.exists(filename):\n # hl.zipUserKeys(keys) \n \n return send_file(filename, as_attachment=True)\n #Else use relative dev path\n else:\n return send_file('static\\\\Test_client1.zip', as_attachment=True)", "def ex_create_keypair(self, name):\n params = {\n 'Action': 'CreateKeyPair',\n 'KeyName': name,\n }\n response = self.connection.request(self.path, params=params).object\n key_material = self._findtext(response, 'keyMaterial')\n key_fingerprint = self._findtext(response, 'keyFingerprint')\n return {\n 'keyMaterial': key_material,\n 'keyFingerprint': key_fingerprint,\n }", "def generate_access_key(self):\n\t\tfrom app import app\n\t\ts = JSONWebSignatureSerializer(app.config['SECRET_KEY'])\n\t\taccess_key = s.dumps({'username': self.username}) \n\t\tself.access_key = access_key", "def generate_random_key(self):\n self.key = ''.join(choice(ascii_letters + digits) for i in range(300))", "def fingerprint(self, fingerprint_hash=None):\n try:\n fd, name = tempfile.mkstemp(prefix='sshkey-')\n with open(name, 'w') as fd:\n fd.write('{}'.format(self.line))\n if fingerprint_hash:\n p = Popen(('ssh-keygen', '-E', fingerprint_hash, '-lf', name), stdin=PIPE, stdout=PIPE, stderr=PIPE)\n else:\n p = Popen(('ssh-keygen', '-lf', name), stdin=PIPE, stdout=PIPE, stderr=PIPE)\n stdout, stderr = [str(v, 'utf-8') for v in p.communicate()]\n if p.returncode != 0:\n raise SSHKeyError('Error running ssh-keygen: returns {}'.format(p.returncode))\n os.unlink(name)\n return stdout.rstrip().split()[1].split(':', 1)[1]\n except Exception as e:\n raise SSHKeyError('Error getting fingerprint for {}: {}'.format(self.line, e))", "def getRemoteKey(cmd, path, ip, user, passwd):\n\n sshToOtherClient(ip, user, passwd, cmd)\n showKeyCmd = 'cat %s' % (path)\n remote_key = sshToOtherClient(ip, user, passwd, showKeyCmd)\n logging.debug(\"Remote key for %s has been generated successfully : %s\",\n ip, remote_key)\n return remote_key", "def write_key():\n key = fernet.Fernet.generate_key()\n keyfile = open(KEY_PATH,'wb')\n keyfile.write(key)\n keyfile.close()", "def get_or_create_key_name(self, gen_key=True):\n keyname = self.extract_key_name()\n if keyname:\n self.report(f'found keyname: {keyname}')\n elif gen_key:\n keyname = self.generate_key()\n self.report(f'generated key: {keyname}')\n else:\n print(f'gpg key for debrepo was not found for user {self.user}. 
'\n 'please use $0 generate_key, then try this command again')\n self.report('no keyname')\n keyname = None\n return keyname", "def download_data_key(self, name):\n temp_data_key = self._get_data_key(name)\n # File wasn't found on s3 so we return.\n if not temp_data_key:\n return\n\n output_file = \"/dev/shm/\" + name + \".tmp.key\"\n\n try:\n file = open(output_file, \"w\")\n except Exception as e:\n print \"[-] Error opening /dev/shm for writing.\"\n return\n\n file.write(temp_data_key)\n os.chmod(output_file, 0600)\n\n print \"[+] {0} data key saved to {1}\".format(name, output_file)", "def create_key(iam_username):\n\n try:\n response = iam.create_access_key(UserName=iam_username)\n access_key = response[\"AccessKey\"][\"AccessKeyId\"]\n secret_key = response[\"AccessKey\"][\"SecretAccessKey\"]\n json_data = json.dumps({\"AccessKey\": access_key, \"SecretKey\": secret_key})\n secretmanager.put_secret_value(SecretId=iam_username, SecretString=json_data)\n\n \n emailmsg = (\n \"Hello,\\n\\n\"\n \"A new access key has been created for key rotation. \\n\\n\"\n f\"Access Key Id: {access_key}\\n\"\n f\"Secrets Manager Secret Id: {iam_username}\"\n )\n\n emailmsg = (\n f\"{emailmsg}\\n\\n\"\n f\"Please obtain the new access key information from \"\n \"secrets manager using the secret Id provided above in \"\n f\"{AWS_REGION_NAME} and update your application within 14 days \"\n \"to avoid interruption.\\n\"\n )\n\n sns.publish(\n TopicArn=SNS_TOPIC_ARN,\n Message=emailmsg,\n Subject=f\"AWS Access Key Rotation: New key is available for \"\n f\"{iam_username}\",\n )\n print(f\"New access key has been created for {iam_username}\")\n return {\"status\": 200}\n except ClientError as e:\n print(e)\n return {\"status\": 500}", "def generate_key(name, func, *extra_keys, **options):\n\n return get_component(CachingPackage.COMPONENT_NAME).generate_key(name, func,\n *extra_keys, **options)", "def generate_key():\n return unicode(hashlib.sha224(str(random.getrandbits(128))).hexdigest())", "def create_key_pair(self, key_name):\n response = key_pair.create_key_pair(self.url, self.verb, self.headers,\n self.version, key_name)\n if response is not None :\n res = CreateKeyPairResponse.CreateKeyPairResponse()\n parseString(str(response.text), res)\n return res\n else :\n return None", "def appendLocalKeyInRemote(key, authorized_key_path, ip, user, passwd):\n\n key_append_cmd = 'echo \"%s\" >> %s' % (key, authorized_key_path)\n sshToOtherClient(ip, user, passwd, key_append_cmd)\n logging.debug(\"Local key has been added into authorized_keys in %s\", ip)", "def createkey(*args): # {{{2\n return '-'.join(map(simplifyname, args))", "def key_upload(self, key=None):\n\n name = key[\"name\"]\n cloud = self.cloud\n Console.msg(f\"upload the key: {name} -> {cloud}\")\n try:\n r = self.cloudman.create_keypair(name, key['public_key'])\n except: # openstack.exceptions.ConflictException:\n raise ValueError(f\"key already exists: {name}\")\n\n return r", "def genKey(self, otherKey):\n self.sharedSecret = self.genSecret(self.privateKey, otherKey)\n #print(\"Shared secret:\")\n #print(self.sharedSecret)\n s = hashlib.sha256()\n s.update(bytes(str(self.sharedSecret).encode()))\n self.key = s.digest()", "def generate_master_key(self):\n return utils.random(secret.SecretBox.KEY_SIZE)", "def gen_key_pair(self, keysize, cb):\n\n def gen_key_pair_pub_cb(data, ctx):\n if not data:\n warning('keymanagement: Could not generate a key pair\\n')\n cb(None, None)\n else:\n cb(ctx, data)\n\n def gen_key_pair_priv_cb(data, ctx):\n if not 
data:\n warning('keymanagement: Could not generate a key pair\\n')\n cb(None, None)\n else:\n xrun([self.sslname, 'rsa', '-pubout'], gen_key_pair_pub_cb,\n data, data)\n\n return xrun([self.sslname, 'genrsa', str(keysize)],\n gen_key_pair_priv_cb, None)", "def gen_api_key():\r\n m = hashlib.sha256()\r\n m.update(get_random_word(12))\r\n return unicode(m.hexdigest()[:12])", "def setup_keys():\n if os.path.isfile(\"key.txt\"):\n message = \"Key already generated\"\n else:\n secret = secrets.token_urlsafe(64)\n message = \"Secret generated and saved in key.txt\"\n with open(\"key.txt\", \"w\") as fd:\n fd.write(secret)\n return json.dumps({'message': message})", "def sshkey(request, action):\n if action == 'add':\n sform = SSHKeyForm(request, None, request.POST)\n if sform.is_valid():\n status = sform.save(action='create', args=(request.user.username, sform.cleaned_data['name']))\n if status == 201:\n messages.success(request, _('SSH key was successfully saved'))\n return redirect('profile')\n\n return render(request, 'gui/profile/profile_sshkey_form.html', {\n 'user': request.user,\n 'sform': sform\n }, status=200)\n\n elif action == 'delete':\n res = SSHKeyForm.api_call('delete', None, request, args=(request.user.username, request.POST.get('name')))\n status = res.status_code\n if status == 200:\n messages.success(request, _('SSH key was successfully removed'))\n return redirect('profile')\n\n return render(request, 'gui/profile/profile_sshkey_list.html', {\n 'user': request.user,\n 'ssh_keys': request.user.usersshkey_set.all().order_by('id'),\n }, status=status)", "def _create_key(chip):\n try:\n suffix = chip['version'][0]\n except IndexError:\n suffix = ''\n\n if chip['classification'] == 'secret':\n classification = 'z'\n else:\n classification = chip['classification'][0]\n\n return '%s-%s%s%s' % (chip['game'], classification, chip['indice'], suffix)", "def gen_pgp_key(name, email, comment=\"generated by sdata\"):\n\n # we can start by generating a primary key. For this example, we'll use RSA, but it could be DSA or ECDSA as well\n key = pgpy.PGPKey.new(PubKeyAlgorithm.RSAEncryptOrSign, 4096)\n\n # we now have some key material, but our new key doesn't have a user ID yet, and therefore is not yet usable!\n uid = pgpy.PGPUID.new(name, comment=comment, email=email)\n\n # now we must add the new user id to the key. We'll need to specify all of our preferences at this point\n # because PGPy doesn't have any built-in key preference defaults at this time\n # this example is similar to GnuPG 2.1.x defaults, with no expiration or preferred keyserver\n key.add_uid(uid, usage={KeyFlags.Sign, KeyFlags.EncryptCommunications, KeyFlags.EncryptStorage},\n hashes=[HashAlgorithm.SHA256, HashAlgorithm.SHA384, HashAlgorithm.SHA512, HashAlgorithm.SHA224],\n ciphers=[SymmetricKeyAlgorithm.AES256, SymmetricKeyAlgorithm.AES192, SymmetricKeyAlgorithm.AES128],\n compression=[CompressionAlgorithm.ZLIB, CompressionAlgorithm.BZ2, CompressionAlgorithm.ZIP,\n CompressionAlgorithm.Uncompressed])\n return key", "def CreateKeyFile():\n keyfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'genrsa',\n '-out', keyfile,\n '2048'\n ]\n _RunCommand(cmd)\n return keyfile", "def generate(self, module):\n\n # If size is wrong, delete the key. 
A new key will be generated in the next step.\n if self.key_current_size != self.size and not self.ignore_size:\n self.remove()\n self.key_exists = False\n else:\n self.changed = False\n\n # If there is no key or user has set \"force\"\n if not self.key_exists or self.force:\n if self.type == \"RSA\":\n self.key = crypto_rsa.generate_private_key(public_exponent=65537, key_size=self.size, backend=crypto_default_backend())\n elif self.type == \"DSA\":\n self.key = crypto_dsa.generate_private_key(key_size=self.size, backend=crypto_default_backend())\n elif self.type == \"ECDSA\":\n if self.size == 256:\n self.curve = crypto_ec.SECP256R1()\n elif self.size == 384:\n self.curve = crypto_ec.SECP384R1()\n elif self.size == 521:\n self.curve = crypto_ec.SECP521R1()\n self.key = crypto_ec.generate_private_key(curve=self.curve, backend=crypto_default_backend())\n elif self.type == \"ED25519\":\n self.size = 128\n self.curve = \"EC25519\"\n else:\n raise HostkeyError(\"Unknown key type.\")\n\n if self.type != \"ED25519\":\n self.privkey = self.key.private_bytes(crypto_serialization.Encoding.PEM, crypto_serialization.PrivateFormat.PKCS8, crypto_serialization.NoEncryption())\n self.pubkey = self.key.public_key().public_bytes(crypto_serialization.Encoding.OpenSSH, crypto_serialization.PublicFormat.OpenSSH)\n\n try:\n privfile = os.open(self.fullpath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)\n os.write(privfile, self.privkey)\n os.close(privfile)\n pubfile = os.open(self.fullpath + \".pub\", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)\n os.write(pubfile, self.pubkey)\n os.close(pubfile)\n self.changed = True\n except IOError:\n self.remove()\n raise HostkeyError(get_exception())\n else:\n # use ssh-keygen to generate ED25519 Hostkeys\n # Keyfile must not exist, as there is no \"force-overwrite\" in ssh-keygen\n self.remove()\n retcode = subprocess.call([\"ssh-keygen\", \"-q\", \"-t\", \"ed25519\", \"-N\", '', \"-f\", self.fullpath])\n self.changed = True\n else:\n self.changed = False\n\n file_args = module.load_file_common_arguments(module.params)\n file_args['path'] = self.fullpath\n if module.set_fs_attributes_if_different(file_args, False):\n self.changed = True\n file_args['path'] = self.fullpath + \".pub\"\n file_args['mode'] = self.pubmode\n if module.set_fs_attributes_if_different(file_args, False):\n self.changed = True", "def add_site(site_name, public_key):\n click.echo('Adding site key for site {}'.format(site_name))\n key_store = KeyStore(get_config_file())\n key_store.add_site(site_name, public_key)", "def update_ssh_public_key(self, key_name: str, public_key: str) -> str:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )", "def write_key(self):\n\t key = Fernet.generate_key()\n\t with open(\"key.key\", \"wb\") as key_file:\n\t key_file.write(key)", "def set_fmp_key(key: str, persist: bool = False, show_output: bool = False) -> str:\n\n handle_credential(\"API_KEY_FINANCIALMODELINGPREP\", key, persist)\n return check_fmp_key(show_output)", "def generate_secret_key(self, server_name: str) -> str:\n if self.config_in_use():\n raise BaseConfigInUseError()\n\n signing_key_path = join(self.config_dir, server_name + \".signing.key\")\n subprocess.run([\"generate_signing_key.py\", \"-o\", signing_key_path])\n with open(signing_key_path, \"r\") as f:\n return f.read()", "def prepareInstance(username, sshId):\n print os.environ['EC2_KEYPAIR_PATH']\n with settings(user='ubuntu',\n 
key_filename=os.environ['EC2_KEYPAIR_PATH']):\n password = getpass('Enter a new password for user %s:' % username)\n password2 = getpass('Enter the password a again:')\n if password != password2:\n raise RuntimeError(\"Passwords don't match\")\n sudo('adduser --disabled-password --gecos \",,,\" %s' % username)\n cryptedPassword = _hashPassword(password)\n sudo('usermod --password %s %s' % (cryptedPassword, username))\n sudo('gpasswd --add %s admin' % username)\n authorizeSshKey(username, sshId)\n sudo('apt-get update')\n sudo('DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y')\n if exists('/var/run/reboot-required'):\n reboot()", "def save_keyname_file(self, keyname):\n if self.dryrun:\n keyname = '{keyname}'\n else:\n assert keyname, ('need keyname', keyname)\n\n cmd=f'/bin/echo \"{keyname}\" > {self.keyname_file}'\n s, out, err = self.as_user(cmd)\n\n if s == 0:\n self.report(f'Wrote {self.keyname_file}')\n elif s == None:\n pass # dryrun\n else:\n self.report(f'Failed to save {self.keyname_file}')\n self.report('\\t', err.decode('utf8'))\n return s", "def create_new_key(self) -> None:\r\n key_name = simpledialog.askstring(\"Key Name\", \"Please enter key name\",\r\n parent=self.parent)\r\n if key_name:\r\n try:\r\n self.callbacks[Events.ADD_KEY](self.selected_item.path, key_name)\r\n self.tree.insert(self.selected_item.id, 'end', text = key_name, open = True, image = self.folder_img, tags = (EXPLICIT_TAG, ))\r\n except Exception as e:\r\n self.callbacks[Events.SHOW_ERROR](f\"Could not add key\\n({str(e)})\")", "def create_keypair(econfig_file=None, region=None, keyname=\"bcbio\"):\n import boto\n import boto.ec2\n if econfig_file:\n keypair_dir = os.path.dirname(econfig_file).replace(\"elasticluster\", \"aws_keypairs\")\n else:\n keypair_dir = os.path.join(os.getcwd(), \"aws_keypairs\")\n if not os.path.exists(keypair_dir):\n os.makedirs(keypair_dir)\n private_key = os.path.join(os.path.join(keypair_dir, keyname))\n new_key = not os.path.exists(private_key)\n if new_key:\n cmd = [\"ssh-keygen\", \"-t\", \"rsa\", \"-N\", \"\", \"-f\", private_key, \"-C\", \"bcbio_aws_keypair\"]\n subprocess.check_call(cmd)\n public_key = private_key + \".pub\"\n if region:\n ec2 = boto.ec2.connect_to_region(region)\n else:\n ec2 = boto.connect_ec2()\n key = ec2.get_key_pair(keyname)\n if key and new_key:\n print(\"Non matching key %s found in AWS, removing.\" % keyname)\n ec2.delete_key_pair(keyname)\n key = None\n if not key:\n print(\"Key %s not found in AWS, importing created key\" % keyname)\n with open(public_key) as in_handle:\n body = in_handle.read()\n try:\n ec2.import_key_pair(keyname, body)\n except TypeError as e:\n body = body.encode('utf-8')\n ec2.import_key_pair(keyname, body)\n return {\"user_key_name\": keyname, \"user_key_private\": private_key,\n \"user_key_public\": public_key}", "def cmd_generate(argv):\n description = inspect.getdoc(cmd_generate)\n parser = ArgumentParser(description=description)\n parser.add_argument(\"-o\",\"--output\", action=\"store\", dest=\"output\",\n default=\"dhall_key\", help=\"outpuf filename\")\n args = parser.parse_args(argv)\n\n import dhall.util\n dhall.util.generate_keys(key=args.output)", "def _AddPerInstanceSshkey(self):\n if self._ssh_public_key_path:\n rsa = self._LoadSshPublicKey(self._ssh_public_key_path)\n logger.info(\"ssh_public_key_path is specified in config: %s, \"\n \"will add the key to the instance.\",\n self._ssh_public_key_path)\n self._metadata[\"sshKeys\"] = \"{0}:{2}\\n{1}:{2}\".format(getpass.getuser(),\n 
constants.GCE_USER,\n rsa)\n else:\n logger.warning(\n \"ssh_public_key_path is not specified in config, \"\n \"only project-wide key will be effective.\")", "def create_key_pair(self, key_name):\r\n params = {'KeyName':key_name}\r\n return self.get_object('CreateKeyPair', params, KeyPair, verb='POST')" ]
[ "0.67290777", "0.635065", "0.6294067", "0.62377936", "0.61897206", "0.5912573", "0.58597594", "0.5857335", "0.5813296", "0.5799843", "0.57982856", "0.5796951", "0.5789643", "0.56701356", "0.56502396", "0.5586603", "0.5574599", "0.5564199", "0.5562149", "0.55100757", "0.5497694", "0.54795367", "0.5477538", "0.54654604", "0.5463374", "0.5444217", "0.5441464", "0.5434708", "0.54322183", "0.5415867", "0.54146636", "0.5388433", "0.5382078", "0.5380867", "0.53782743", "0.5376606", "0.5366838", "0.53611887", "0.5357806", "0.5345036", "0.5338812", "0.5319382", "0.5301452", "0.5299807", "0.52929324", "0.5280348", "0.52773124", "0.52651626", "0.52644724", "0.5262469", "0.524745", "0.5243768", "0.524289", "0.522334", "0.5211016", "0.5210957", "0.5207756", "0.5195977", "0.51915216", "0.51652205", "0.51600206", "0.5148826", "0.5142505", "0.5121461", "0.51088166", "0.51072127", "0.5100436", "0.5100409", "0.5091131", "0.50839084", "0.50834244", "0.5064365", "0.50582236", "0.5052598", "0.5048572", "0.50389606", "0.5034709", "0.5019304", "0.5018106", "0.5011412", "0.50071484", "0.500521", "0.5001268", "0.49976513", "0.4995651", "0.4989688", "0.498082", "0.49807084", "0.49796256", "0.49750444", "0.49712753", "0.4967101", "0.49664202", "0.49619102", "0.49599424", "0.49564624", "0.49539322", "0.49503723", "0.49486852", "0.49456814" ]
0.69770044
0
Installs pip itself if needed.
def install_pip():
    with settings(warn_only=True):
        run('mkdir $HOME/lib/python2.7')
        run('easy_install-2.7 pip')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pip_install():\n _require_environment()\n remote(PIP_INSTALL_PREFIX)", "def pipInstall(self):\n\n print \"Does Nothing\"", "def _setup_pip(self, context):\n # We run ensurepip in isolated mode to avoid side effects from\n # environment vars, the current directory and anything else\n # intended for the global Python environment\n cmd = [context.env_exec_cmd, '-Im', 'ensurepip', '--upgrade',\n '--default-pip']\n subprocess.check_output(cmd, stderr=subprocess.STDOUT)", "def pip_installs():\n pip = r'pip-2.7 install --install-option=\"--install-scripts=$PWD/bin\" --install-option=\"--install-lib=$PWD/lib/python2.7\" '\n with settings(warn_only=True):\n run(\"mkdir $HOME/tmp\")\n with cd(remote_dir):\n for installation in install_list:\n run(\"export TEMP=$HOME/tmp && %s %s\" % (pip, installation))\n run(\"echo '#%s' >> $HOME/.bash_profile\" % python_add_str)", "def install_pip(pkg_version=None):\n # FIXME: https://github.com/ansible/ansible-container/issues/919\n\n if pkg_version:\n pkg_name = \"pip==\" + pkg_version\n else:\n pkg_name = \"pip\"\n\n try:\n subprocess.check_call([\"easy_install\", \"--user\", pkg_name])\n except subprocess.CalledProcessError:\n print \"[Error] while installing pip\"", "def __install(self):\n command = self.pipComboBox.currentText()\n if command == self.__default:\n command = \"\"\n \n packages = []\n for itm in self.resultList.selectedItems():\n packages.append(itm.text(0).strip())\n if packages:\n self.__pip.installPackages(packages, cmd=command)", "def pip_packages():\n packages = reduce(lambda a, x: \"%s %s\" % (a, x), PIP_PACKAGES, '')\n sudo(\"pip install %s &> /dev/null\" % packages)", "def install_pkg(pip, package):\n if not os.path.isdir(INSTALL_DIR):\n os.makedirs(INSTALL_DIR)\n pip_cmds = ['mayapy', pip, 'install', package, '--target', INSTALL_DIR, '--log', DEPENDENCY_INSTALL_LOG]\n print(pip_cmds)\n installer = subprocess.Popen(pip_cmds)\n installer.wait()\n print(\"Successfully installed package {}\".format(package))\n if installer.returncode != 0:\n raise RuntimeError(\"Failed to install package: {}, please check logs in: {}\".format(package, DEPENDENCY_INSTALL_LOG))", "def install():\n verun('pip install -r {0}'.format(requirements))", "def _pipInstall(self, directory: Directory) -> None:\n\n pipExec = os.path.join(os.path.dirname(sys.executable), \"pip\")\n\n pipArgs = [sys.executable, pipExec] + self.makePipArgs(directory)\n\n # The platform update is tested for dependencies when it's first uploaded\n # PIP has a bug, when you have updated packages for several dependent files\n # and try to install them all at once, some of the packages don't update.\n pipArgs += ['--no-deps']\n\n pipArgs = ' '.join(pipArgs)\n\n try:\n spawnPty(pipArgs)\n logger.info(\"Peek package update complete.\")\n\n except Exception as e:\n logSpawnException(e)\n\n # Update the detail of the exception and raise it\n e.message = \"Failed to install packages from the new release.\"\n raise", "def update_dependencies():\n pip = env.virtualenv.child('bin', 'pip')\n reqs = env.code_dir.child('deploy-requirements.txt')\n sudo('%s -q install -U pip' % pip)\n sudo('%s -q install -r %s' % (pip, reqs))", "def install(self):\n other_args = list(requirement_args(self._argv, want_other=True))\n archive_path = join(self._temp_path, self._downloaded_filename())\n # -U so it installs whether pip deems the requirement \"satisfied\" or\n # not. 
This is necessary for GitHub-sourced zips, which change without\n # their version numbers changing.\n run_pip(['install'] + other_args + ['--no-deps', '-U', archive_path])", "def pipupdate():\n\n packages = [d for d in pkg_resources.working_set]\n subprocess.call('pip install --upgrade ' + ' '.join(packages))", "def upgrade_pip():\n out_info(\"Upgrading pip...\")\n pipexe = [sys.executable, \"-m\", \"pip\"]\n pipexe.extend([\"install\", \"--no-cache-dir\", \"-qq\", \"--upgrade\"])\n if not IS_ADMIN and not IS_VIRTUALENV:\n pipexe.append(\"--user\")\n pipexe.append(\"pip\")\n run(pipexe)", "def pip_requirements():\n\n require(\n \"virtualenv_path\",\n \"requirements_path\",\n \"http_proxy\",\n \"https_proxy\",\n \"sudo_user\",\n )\n cmd = \"pip install --quiet --requirement %s\" % env.requirements_path\n\n # append packages url if specified\n if env.get(\"packages_url\") is not None:\n cmd += \" -f %s\" % env.get(\"packages_url\")\n\n with context_managers.proxy(env.http_proxy, env.https_proxy):\n with context_managers.virtualenv(env.virtualenv_path):\n sudo(cmd, user=env.sudo_user)", "def pip(c):\n\n if Path('requirements.txt').exists():\n c.run(\"pip install -r requirements.txt\")\n\n for sp_ns in ns_foreach_task_subdir():\n try:\n sp_ns.tasks.pip(c)\n except UnexpectedExit:\n pass", "def check_pip():\n try:\n import pip\n except ImportError:\n out_error(\"Import pip failed. Please Install python3-pip \"\n \"and try again\")\n exit(1)\n upgrade_pip()\n importlib.reload(pip)\n pip_version = pip.__version__\n del pip\n\n get_installed_packages()\n out_info(\"Installed pip: {}\".format(pip_version))", "def install(package):\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", package])", "def install_pip():\n pip_install_txt = os.path.join(os.path.abspath(os.path.join(__file__, os.pardir)), \"build_test_dependencies.txt\")\n call_subprocess(\"python3 -m pip install -r %s\" % pip_install_txt)\n print(\"Stage install dependencies -- COMPLETED --\")", "def pip_install(\n versioned_package: str,\n install_path: str,\n upgrade: bool = False,\n no_dependencies: bool = False\n) -> None:\n verify_pip_is_installed()\n\n additional_pip_args = []\n if upgrade:\n additional_pip_args.append('--upgrade')\n if no_dependencies:\n additional_pip_args.append('--no-dependencies')\n\n _run_pip_command([\n 'install', versioned_package, '--target', install_path\n ] + additional_pip_args)", "def pip(command):\n with sudo(user='addok'):\n run(f'/srv/addok/venv/bin/pip {command}')", "def pip_install(path: PathType, package_name: str) -> ContextManagerFunctionReturnType[None]:\n # Not using the function `main` from pip._internal because it assumes that once it finished,\n # the process will terminate, and thus it can failed if called multiple times. 
See\n # https://pip.pypa.io/en/latest/user_guide/#using-pip-from-your-program\n # It actually fails in pip==19.3.1 if called multiple times in the same process (but it works\n # in 20.0).\n # Starting a new process is slower, but it's not a problem if it's not called often.\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", str(path)])\n try:\n yield\n finally:\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"uninstall\", \"-y\", package_name])", "def pip_install_req_file(req_file):\n pip_cmd = 'pip install -q --disable-pip-version-check --exists-action w'\n sh(f\"{pip_cmd} -r {req_file}\")", "def install(self):\n\n self.clean_git_checkout(self.git_repo, '/src')\n\n self.__copy_config_templates();\n\n self.local(\"sudo pip install -r src/requirements.txt --upgrade\")\n\n if not self.is_local():\n PiService.install(self) #copy to remote\n\n self.sudo(\"pip install -r src/requirements.txt --upgrade\")", "def pipinstall(packages):\n\n if isinstance(packages, str):\n if hasattr(pip, 'main'):\n pip.main(['install', packages])\n else:\n pip._internal.main(['install', packages])\n elif isinstance(packages, list):\n for i in enumerate(packages):\n if hasattr(pip, 'main'):\n pip.main(['install', i[1]])\n else:\n pip._internal.main(['install', i[1]])\n else:\n raise TypeError(\"Nor a string or a list was provided.\")", "def pip_install(*args):\n call(WITH_VENV, '.venv', 'pip', 'install', *args)", "def setup(ctx):\r\n ctx.run('pip3 install -r requirements.txt')", "def pipenv(pipenv_cmd='pipenv', python_cmd='python3', use_sudo=True):\n pip(python_cmd, use_sudo)\n if not is_pipenv_installed(version=None, pipenv_cmd=pipenv_cmd):\n install_pipenv(python_cmd=python_cmd)", "def install_requirements():\n local('. fabric_factory/ve/bin/activate; easy_install pip')\n local('. fabric_factory/ve/bin/activate; pip install -r requirements.txt')", "def get_pip():\n return 'pip'", "def pip_install(connection, repo, package, path=None, version=None, site=None, nodeps=False):\n if repo in [\"git\", \"hg\", \"github\"]:\n if repo == \"github\":\n path = \"git://github.com/{0}\".format(path)\n repo = \"git\"\n fmt_egg = \"{0}+{1}/{2}\".format(repo, path, package)\n if version:\n fmt_egg += \"@{0}\".format(version)\n fmt_egg += \"#egg={0}\".format(package)\n elif repo == \"pypi\":\n fmt_egg = package\n if version:\n fmt_egg += \"=={0}\".format(version)\n else:\n print(red(\"Repo type does not exist, use git, hg, or pypi\"))\n raise(NotImplementedError)\n if site is None:\n sites = [site[\"name\"] for site in settings.SITES]\n else:\n sites = [site]\n for site in sites:\n args = \"\"\n if nodeps:\n args += \"--no-dependencies\"\n if connection == \"dev\":\n venv_local(\"pip install --ignore-installed {0} {1}\".format(args, fmt_egg), site)\n elif connection == \"prod\":\n venv(\"pip install --ignore-installed {0} {1}\".format(args, fmt_egg), site)\n else:\n print(red(\"Bad connection type. 
Use ``dev`` or ``prod``.\"))", "def install():\n return {\n \"actions\": [TaskCreator.get_pip() + \" install --upgrade dist/*.whl\"],\n \"verbosity\": 2,\n \"setup\": [\"make_distribution\"],\n }", "def set_installed_packages():\n global INSTALLED_PACKAGES, REQUIRED_VERSION\n if INSTALLED_PACKAGES:\n return\n\n if os.path.exists(BIN_PYTHON):\n pip = subprocess.Popen(\n (BIN_PYTHON, '-m', 'pip', 'freeze'),\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE\n )\n (stdout, stderr) = pip.communicate()\n pip.wait()\n\n INSTALLED_PACKAGES = [normalize_package_name(r.decode().split('==')[0].lower()) for r in stdout.split()]\n REQUIRED_VERSION = next((package for package in INSTALLED_PACKAGES if re.match(r'^lore[!<>=]', package)), None)\n if REQUIRED_VERSION:\n REQUIRED_VERSION = re.split(r'[!<>=]', REQUIRED_VERSION)[-1]", "def test_pip_install(self):\n output, _error = self.executor.pip_install(['attrs'])\n self.assertEqual(output, 'attrs installed')", "def install_requirements(self, requirements, extra_pip_args=None):\n\n pip_exe = os.path.join(self.scripts_dir, \"pip\")\n cmd = [pip_exe, \"install\", \"-r\", requirements] + (extra_pip_args or [])\n out = subprocess.Popen(cmd).communicate()", "def install(dependency_dictionary):\n\n distro = guess_system()\n files = (dependency_dictionary.get(distro) or\n dependency_dictionary.get('pip'))\n if not files:\n return None\n can_install = (('pip' in dependency_dictionary and pip_installed) or\n distro in dependency_dictionary)\n if can_install:\n action = show_question(\n files,\n distro in dependency_dictionary,\n 'pip' in dependency_dictionary)\n if action == 'distro':\n callable_ = getattr(vistrails.gui.bundles.installbundle,\n distro.replace('-', '_') + '_install')\n return callable_(files)\n elif action == 'pip':\n if not pip_installed:\n debug.warning(\"Attempted to use pip, but it is not installed.\")\n return False\n return pip_install(dependency_dictionary.get('pip'))\n else:\n return False", "def _pip_install_requirements(\n install_path: str, requirements_path: str\n) -> None:\n verify_pip_is_installed()\n _run_pip_command([\n 'install', '--require-hashes', '--no-deps', '--target',\n install_path, '--no-dependencies', '-r', requirements_path, '--upgrade'\n ])", "def install_and_import(package):\n import importlib\n try:\n importlib.import_module(package)\n except ImportError:\n checkPipInstalled()\n try:\n from pip import main as pipmain\n except:\n from pip._internal import main as pipmain\n pipmain(['install', package])\n finally:\n globals()[package] = importlib.import_module(package)", "def _install_packages(prefix, net_install):\n prefix = os.path.join(os.path.realpath(prefix), \"miniconda\")\n directory = os.path.join(sys._MEIPASS, \"packages\")\n tmp_dir = os.path.join(sys._MEIPASS, \"tmp\")\n if not os.path.exists(tmp_dir):\n os.mkdir(tmp_dir)\n\n if \"Windows\" in platform.system():\n python = os.path.join(prefix, \"python\")\n elif \"Linux\" in platform.system():\n bin_dir = os.path.join(prefix, \"bin\")\n lib_dir = os.path.join(prefix, \"lib\")\n os.putenv('PYTHONPATH', '{}:{}'.format(bin_dir, lib_dir))\n\n # Fix for the SELinux issue on the 32 bit installer\n if \"32bit\" in platform.architecture() and \"armv7l\" not in platform.machine():\n system_call(\n \" \".join([\n \"execstack\",\n \"-c\",\n os.path.join(\n lib_dir,\n \"python2.7\",\n \"lib-dynload\",\n \"_ctypes.so\"\n )\n ]),\n )\n\n python = os.path.join(prefix, \"bin\", \"python\")\n\n print(\"\\tEnsuring pip is installed\")\n system_call(\n \" \".join([python, 
\"-m\", \"ensurepip\"]),\n )\n\n if net_install:\n for dependency in conda_dependencies[platform.system()][platform.architecture()[0]].keys():\n print(\"### installing: {}\".format(dependency))\n system_call(\n \" \".join([\n python,\n \"-m\",\n \"conda\",\n \"install\",\n dependency,\n ]),\n )\n for dependency in pip_dependencies:\n print(\"### installing: {}\".format(dependency))\n\n system_call(\n \" \".join([\n python,\n \"-m\",\n \"pip\",\n \"install\",\n '\"{}\"'.format(dependency),\n ]),\n )\n else:\n for dependency in conda_dependencies[platform.system()][platform.architecture()[0]].keys():\n print(\"### installing: {}\".format(dependency))\n _dependency = list(\n filter(\n lambda filename: (dependency.lower() in filename.lower()) and (\".tar.bz2\" in filename),\n os.listdir(directory)\n )\n )\n _dependency = os.path.join(directory, _dependency[0])\n\n system_call(\n \" \".join([\n python,\n \"-m\",\n \"conda\",\n \"install\",\n \"--offline\",\n _dependency,\n ]),\n )\n for dependency in pip_dependencies:\n print(\"### installing: {}\".format(dependency))\n _dependency = dependency\n if \"git+\" in dependency:\n _dependency = dependency.split(\"/\")[-1].split(\"#\")[0]\n if \"mast\" in dependency:\n system_call(\n \" \".join([\n python,\n \"-m\",\n \"pip\",\n \"install\",\n \"--no-index\",\n \"--force-reinstall\",\n \"--find-links\",\n directory,\n '\"{}\"'.format(_dependency),\n ]),\n )\n else:\n system_call(\n \" \".join([\n python,\n \"-m\",\n \"pip\",\n \"install\",\n \"--upgrade\",\n \"--no-index\",\n \"--find-links\",\n directory,\n '\"{}\"'.format(_dependency),\n ]),\n )", "def make_pip_install_command(packages):\n return \"pip install \" + \" \".join(\"'{}'\".format(x) for x in packages)", "def test_pip_install(salt_call_cli):\n dep = \"PyGithub\"\n repo = \"https://github.com/saltstack/salt.git\"\n\n try:\n install = salt_call_cli.run(\"--local\", \"pip.install\", dep)\n assert install.returncode == 0\n\n use_lib = salt_call_cli.run(\"--local\", \"github.get_repo_info\", repo)\n assert \"Authentication information could\" in use_lib.stderr\n finally:\n ret = salt_call_cli.run(\"--local\", \"pip.uninstall\", dep)\n assert ret.returncode == 0\n use_lib = salt_call_cli.run(\"--local\", \"github.get_repo_info\", repo)\n assert \"The github execution module cannot be loaded\" in use_lib.stderr", "def call_pip(pip_args: List[str], timeout: float = 300, retry: bool = False) -> None:\n command = [sys.executable, \"-m\", \"pip\", *pip_args]\n\n result = subprocess.run( # nosec\n command, stdout=PIPE, stderr=PIPE, timeout=timeout, check=False\n )\n if result.returncode == 1 and retry:\n # try a second time\n result = subprocess.run( # nosec\n command, stdout=PIPE, stderr=PIPE, timeout=timeout, check=False\n )\n enforce(\n result.returncode == 0,\n f\"pip install failed. 
Return code != 0: stderr is {str(result.stderr)}\",\n )", "def get_pip():\n return path.join(TaskCreator.bin_dir, \"pip\")", "def upgrade(self, dependencies = False):\n pip_args = []\n proxy = environ.get('http_proxy')\n if proxy:\n pip_args.append('--proxy')\n pip_args.append(proxy)\n pip_args.append('install')\n pip_args.append(self.pkg)\n if self.index is not None:\n pip_args.append('-i')\n pip_args.append(\"{}/\".format(self.index))\n if not dependencies:\n pip_args.append(\"--no-deps\")\n if self._get_current() != [-1]:\n pip_args.append(\"--upgrade\")\n a=pip.main(pip_args)\n return a==0", "def checkPipInstalled():\n\n PIP_INSTALLED = True\n\n try:\n import pip\n except ImportError:\n PIP_INSTALLED = False\n\n if not PIP_INSTALLED:\n raise ImportError('pip is not installed.')", "def install():\n\n if (Path.cwd() / \"src\" / \"environment.yml\").is_file():\n call([\"conda\", \"install\", \"--file\", \"src/environment.yml\", \"--yes\"])\n\n pip_command = [\"install\", \"-U\", \"-r\", \"src/requirements.txt\"]\n\n if os.name == \"posix\":\n python_call(\"pip\", pip_command)\n else:\n command = [sys.executable, \"-m\", \"pip\"] + pip_command\n subprocess.Popen(command, creationflags=subprocess.CREATE_NEW_CONSOLE)", "def install_deps():\n dist = check_distribution()\n if dist == Distribution.TEXLIVE:\n texlive_install_deps()\n elif dist == Distribution.MIKTEX:\n miktex_install_deps()\n\n install_pygments()", "def _prepare_cli(self):\n self.logger.info('installing cli...')\n\n self._get_resource(self.cli_package_url, ops='-LO', sudo=True)\n self._get_resource('https://bootstrap.pypa.io/get-pip.py',\n pipe_command='sudo python2.7 -')\n self._execute_command('pip install virtualenv', sudo=True)\n\n last_ind = self.cli_package_url.rindex('/')\n return self.cli_package_url[last_ind + 1:]", "def install_cached_package(self, package_name):\n self._log.info(\"Installing package {!r} from talus pypi\".format(package_name))\n pinfo = self.cache[\"pypi\"][package_name]\n pypi_hostname = re.match(r'^.*://([^/]+)/.*$', self.pypi_loc).group(1)\n\n try:\n self._run_pip_main([\n \"install\",\n \"--user\",\n \"--trusted-host\", pypi_hostname,\n \"-i\", self.pypi_loc,\n package_name\n ])\n except SystemExit as e:\n raise Exception(\"Is SystemExit expected?\")", "def pre_install(self, installable_pkgs):\n pass", "def install(self) -> None:\n if self.local_packages:\n self.prepare_install_local()\n self.install_local()\n if self.remote_packages:\n self.install_from_url()\n if self.repository_packages:\n self.install_from_repository()\n if self.debuginfo_packages:\n self.install_debuginfo()", "def install_requirements(self):\n logging.debug('Attempting to pip install requirements to build dir...')\n try:\n pip_install_cmd = Lambda.PIP_INSTALL_REQUIREMENTS_TMPL.format(requirements=self.requirements_file,\n build_dir=self.build_dir)\n completed_process = subprocess.run(pip_install_cmd, shell=True, check=True,\n stdout=subprocess.PIPE)\n\n logging.debug('Successful pip install.')\n logging.debug('stdout: {}'.format(completed_process.stdout))\n logging.debug('stderr: {}'.format(completed_process.stderr))\n except subprocess.CalledProcessError as e:\n logging.error('Failed to install pip requirements to build dir..')\n raise e", "def sync(to_be_installed, to_be_uninstalled, verbose=False):\n\n flags = []\n\n if not verbose:\n flags.append('-q')\n\n if to_be_uninstalled:\n pip.main([\"uninstall\", '-y'] + flags + [str(req) for req in to_be_uninstalled])\n\n if to_be_installed:\n pip.main([\"install\"] + flags + 
[str(req) for req in to_be_installed])", "def sub_install_packages():\n sudo('apt-get update') # Update repository links\n sudo('apt-get -y upgrade') # Upgrade the system\n package_str = ' '.join(INSTALL_PACKAGES)\n sudo('apt-get -y install ' + package_str) # Install the packages", "def sync_virtualenv(ctx):\n if not path.isfile(\"./pyenv/bin/pip\"):\n ctx.run(\"virtualenv --no-site-packages --python=/usr/bin/python2.7 pyenv\")\n ctx.run(\"PIP_DOWNLOAD_CACHE=/var/tmp/ ./pyenv/bin/pip install -r requirements.txt\")\n print(\"\"\"\n Installation completed. Please check any error messages above.\n\n If you are going to use `openstack` or ansible directly on the command line, run\n\n . ./pyenv/bin/activate\n\n or even add it to your ~/.bashrc\n \"\"\")", "def intallpack(package_name: str, version: str='', nodeps: bool=False) -> None:\n\t# construct package name with version if needed\n\tinstallstr = package_name if not version else \"{}=={}\".format(package_name, version)\n\n\t# construct command and flags\n\tcommand = ['pip', 'install']\n\tif nodeps:\n\t\tcommand.append('--no-deps')\n\tcommand.append(installstr)\n\n\t# execute command\n\tresp = subprocess.call(command)", "def required():\n pip = path(\"bin/pip\")\n if not pip.exists():\n sh('%s install -E tg2env -r normal-reqs.txt --extra-index-url=http://www.turbogears.org/2.0/downloads/current/index' % pip)\n call_pavement('pavement.py', 'develop')", "def install():\n PackCommandExecutor().pack()\n InstallCommandExecutor().install()", "def install_namespace_pkg(pip, package, namespace):\n temp_target = os.path.join(INSTALL_DIR, 'temp-target')\n if not os.path.isdir(temp_target):\n os.makedirs(temp_target)\n pip_cmds = ['mayapy', pip, 'install', package, '--no-deps', '--target', temp_target, '--log', DEPENDENCY_INSTALL_LOG]\n print(pip_cmds)\n installer = subprocess.Popen(pip_cmds)\n installer.wait()\n if installer.returncode == 0:\n try:\n dir_util.copy_tree(os.path.join(temp_target, namespace), os.path.join(INSTALL_DIR, namespace))\n except Exception as exp:\n print(exp)\n try:\n shutil.rmtree(temp_target)\n print(\"Successfully installed namespace package {} to namespace {}\".format(package, namespace))\n except Exception as exp:\n print(exp)\n else:\n raise RuntimeError(\"Failed to install package: {} to namespace: {}, please check logs in: {}\".format(package, namespace, DEPENDENCY_INSTALL_LOG))", "def install_requirements():\n req_path = os.path.join(vlogger_dir, \"requirements.txt\")\n subprocess.call([\"pip\", \"install\", \"-r\", req_path])", "def install_requirements():\n with cd(env.code_dir):\n with _virtualenv():\n sudo('pip install -r requirements.txt', pty=True)", "def install():\n return InstallGit()", "def _install(self):\n # Default implementation\n for pm_name, package in self._provider_package.items():\n if helpers[pm_name]:\n helpers[pm_name].install_package(package)\n return\n raise self.unsure_how_to_install()", "def test_pip_install_with_extra_pypi_servers(self):\n package_manager.install_lib('foo')\n self.assertEqual(run_commands(), [\n 'pip install -E %s -r %s --extra-index-url=%s --extra-index-url=%s' % (\n self.test_env,\n self.req_path,\n 'http://localhost:8000/simple',\n 'http://pypi.internal.com/simple'\n )\n ])", "def poetry(poetry_cmd='poetry', python_cmd='python3', use_sudo=True):\n pip(python_cmd, use_sudo)\n if not is_poetry_installed(version=None, poetry_cmd=poetry_cmd):\n install_poetry(python_cmd=python_cmd)", "def install_requirements():\r\n if env.hosts:\r\n run ('cd %(path)s %(command_join)s 
env/bin/pip install -r current-release/requirements.txt' % env)\r\n else:\r\n local('%spip install -r requirements.txt' % virtualenv_bin, capture=False)", "def pip_requirements(connection, site=None):\n if site is None:\n sites = [site[\"name\"] for site in settings.SITES]\n else:\n sites = [site]\n for site in sites:\n print(green(\"started pip install for {0}\".format(site)))\n if connection == \"dev\":\n venv_local(\"pip install --upgrade pip\", site)\n venv_local(\"pip install --quiet --requirement=requirements.txt\", site)\n elif connection == \"prod\":\n venv(\"pip install --upgrade pip\", site)\n venv(\"pip install --quiet --requirement=requirements.txt\", site)\n else:\n print(red(\"Bad connection type. Use ``dev`` or ``prod``.\"))\n print(green(\"finished pip install for {0}\".format(site)))", "def _install_dependencies(self):\n\n requirements_file = self.app_directory.joinpath('requirements.txt')\n\n package_copy_required = False\n if requirements_file.exists():\n cmd = [\n sys.executable,\n '-m',\n 'pip',\n 'install',\n '-r',\n str(requirements_file),\n '-t',\n str(self.build_directory),\n ]\n package_copy_required = True\n else:\n cmd = [\n sys.executable,\n '-m',\n 'pip',\n 'install',\n '.',\n '-t',\n str(self.build_directory),\n ]\n\n logger.debug('Running subprocess cmds: %s', cmd)\n\n try:\n _ = subprocess.run(cmd, check=True)\n except Exception:\n logger.error('Pip failed to install the app using cmd=[%s].', cmd)\n raise\n\n if package_copy_required:\n shutil.copytree(\n self.package_dir, self.build_directory.joinpath(self.package_name)\n )", "def _run_pip_command(cmd_parts: List[str]) -> None:\n # The call to python -m is used to ensure that Python and Pip versions are\n # compatible.\n command = [sys.executable, '-m', 'pip'] + cmd_parts\n process = subprocess.Popen(\n command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n encoding='utf-8')\n stdout, stderr = process.communicate()\n if process.returncode == 0:\n print(stdout)\n elif 'can\\'t combine user with prefix' in stderr:\n print('Trying by setting --user and --prefix flags.')\n subprocess.check_call(\n command + ['--user', '--prefix=', '--system'])\n else:\n print(stderr)\n print('Refer to https://github.com/oppia/oppia/wiki/Troubleshooting')\n raise Exception('Error installing package')", "def install():\n sudo('apt-get install python')", "def is_installed(self):\r\n return bool(self.pip_requirement.satisfied_by)", "def install(self, no_dependencies: bool = True):\n return PackageHelper.install_package(name=self.name, no_dependencies=no_dependencies)", "def install_dependencies():\n\n # check python version and verify we are using Python 3\n if sys.version[0] < '3':\n print(\"ERROR: python version 3 required. 
You are using version \"\n \"{}\".format(sys.version))\n print(\"You must install python 3 from https://www.python.org\")\n print(\"Make sure to check the 'pip' package manager option when\")\n print(\"installing python\")\n return\n try:\n import pip\n except ModuleNotFoundError:\n print(\"The python 'pip' package manager is required.\")\n print(\"Go to https://www.python.org and download Python 3\")\n print(\"When re-installing, select 'modify' and make sure\")\n print(\"to check the 'pip' option\")\n return\n\n print(\"Python 3 and pip is installed\")\n\n # upgrade/install dependencies such as robot framework\n subprocess.run([\"python\", \"-m\", \"pip\", \"install\", \"-q\", \"--user\",\n \"--no-warn-script-location\", \"-r\",\n os.path.join(os.path.curdir, \"requirements.txt\")],\n shell=True, check=True)\n print(\"Robot framework is installed and up to date\")\n print(\"PyQT5 is installed and up to date\")", "def install_requirements():\n _git_pull()\n _install_requirements()\n _syncdb()\n _migrate()\n _restart_webserver()", "def _install_packages(packages):\n for package in packages:\n cuisine.package_ensure(package)", "def verify_pip_is_installed() -> None:\n print('Checking if pip is installed on the local machine')\n try:\n # We are just checking that pip is available for import, so it's\n # okay that we don't use it.\n import pip # pylint: disable=unused-import\n except ImportError as e:\n common.print_each_string_after_two_new_lines([\n 'Pip is required to install Oppia dependencies, but pip wasn\\'t '\n 'found on your local machine.',\n 'Please see \\'Installing Oppia\\' on the Oppia developers\\' wiki '\n 'page:'])\n\n if common.is_mac_os():\n print(\n 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-'\n 'OS%29')\n elif common.is_linux_os():\n print(\n 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux'\n '%29')\n else:\n print(\n 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28'\n 'Windows%29')\n raise ImportError('Error importing pip: %s' % e) from e", "def test_install(ctx):\n ctx.run(\"pip uninstall {PROJECT_NAME} --yes\".format(PROJECT_NAME=PROJECT_NAME), warn=True)\n ctx.run(\"pip install --no-cache-dir --no-index --find-links=file:./dist {PROJECT_NAME}\".format(PROJECT_NAME=PROJECT_NAME))\n ctx.run(\"pip uninstall {PROJECT_NAME} --yes\".format(PROJECT_NAME=PROJECT_NAME))", "def bootstrap(): # pragma: no cover, exercised via test_bootstrap() functional test\n pspec = PackageSpec(CFG, \"%s==%s\" % (PICKLEY, __version__))\n grand_parent = runez.parent_folder(runez.parent_folder(__file__))\n if grand_parent and grand_parent.endswith(\".whl\"):\n # We are indeed running from pex\n setup_audit_log()\n python = CFG.find_python(\"/usr/bin/python3\") # Prefer system py3, for stability\n if not python or python.problem:\n python = pspec.python\n\n LOG.debug(\"Bootstrapping pickley %s with %s (re-installing as venv instead of pex package)\" % (pspec.version, python))\n target = pspec.install_path\n venv = PythonVenv(target, python, pspec.index)\n venv.pip_install(\"wheel\")\n with runez.TempFolder():\n venv.run_python(\"-mwheel\", \"pack\", grand_parent)\n names = os.listdir(\".\")\n assert len(names) == 1\n venv.pip_install(names[0])\n\n delivery = DeliveryMethod.delivery_method_by_name(pspec.settings.delivery)\n return delivery.install(pspec, venv, {PICKLEY: \"bootstrapped\"})\n\n else:\n manifest = pspec.get_manifest()\n if not manifest:\n # We're not running from pex, but we need to re-install pickley with latest version, so it gets a manifest 
etc\n return perform_install(pspec, is_upgrade=False, quiet=False)", "def install_deps():\n click.echo(\"install_deps\")", "def install_packages():\n with open(\"requirements.txt\", \"w\") as requirements_file:\n subprocess.run([\"pipenv\", \"lock\", \"-r\"], stdout=requirements_file)\n\n subprocess.run(\n [\"pip\", \"install\", \"-r\", \"requirements.txt\", \"--no-deps\", \"-t\", BUILD_DIR]\n )", "def install_requirements(self, rel_path):\n self._log.debug(\"Installing requirements {}\".format(rel_path))\n\n rel_path = rel_path.replace(\"/\", os.path.sep)\n full_path = os.path.join(self._code_dir, rel_path)\n\n with open(full_path, \"rb\") as f:\n data = f.read()\n\n # this takes a fair amount of time sometimes, so if there's an\n # empty requirements.txt file, skip installing it\n actual_req_count = 0\n for line in data.split(\"\\n\"):\n line = line.strip()\n if line == \"\" or line.startswith(\"#\"):\n continue\n actual_req_count += 1\n if actual_req_count == 0:\n self._log.debug(\"Empty requirements.txt, skipping\")\n return\n\n try:\n threading.local().indentation = 0\n pypi_hostname = re.match(r'^.*://([^/]+)/.*$', self.pypi_loc).group(1)\n self._run_pip_main([\n \"install\",\n \"--user\",\n \"--trusted-host\", pypi_hostname,\n \"-i\", self.pypi_loc,\n \"-r\", full_path\n ])\n \n # this is expected - pip.main will *always* exit\n except SystemExit as e:\n # TODO\n raise Exception(\"Is SystemExit normal?\")\n\n threading.local().indentation = 0", "def pre_install_pkg(self, installable_pkg):\n pass", "def install(self):\n # This installs the packages defined in self.packages\n super().install()\n # Do any other installation work that is needed. If a license key is\n # required then use the custom_assess_status_check() function below to\n # determine whether it is needed.\n # This assess_status() will determine what status the charm is at after\n # install.\n self.assess_status()", "def install(repo, package, python, editable):\n if repo.install(package, python, editable):\n click.echo('Done.')", "def sub_install_virtualenv():\n sudo('pip install virtualenv') # Need sudo b/c installing to system Python", "def install_packages(self):\n for package in self.packages:\n utils.exec_cmd('yum install -v -y {0}'.format(package))", "def _install(self):\n\n pass", "def bootstrap():\n sub_install_packages()\n sub_install_virtualenv()\n sub_create_virtualenv()\n sub_install_python_requirements()", "def install_requirements():\n require(\"release\", provided_by=[deploy])\n with cd(\"%(path)s\" % env):\n sudo(\"./bin/pip install -r ./releases/%(release)s/requirements.txt\" % env)", "def peep_install(argv):\n output = []\n #out = output.append\n out = print\n reqs = []\n try:\n req_paths = list(requirement_args(argv, want_paths=True))\n if not req_paths:\n out(\"You have to specify one or more requirements files with the -r option, because\\n\"\n \"otherwise there's nowhere for peep to look up the hashes.\\n\")\n return COMMAND_LINE_ERROR\n\n # We're a \"peep install\" command, and we have some requirement paths.\n reqs = list(chain.from_iterable(\n downloaded_reqs_from_path(path, argv)\n for path in req_paths))\n buckets = bucket(reqs, lambda r: r.__class__)\n\n # Skip a line after pip's \"Cleaning up...\" so the important stuff\n # stands out:\n if any(buckets[b] for b in ERROR_CLASSES):\n out('\\n')\n\n printers = (lambda r: out(r.head()),\n lambda r: out(r.error() + '\\n'),\n lambda r: out(r.foot()))\n for c in ERROR_CLASSES:\n first_every_last(buckets[c], *printers)\n\n if any(buckets[b] for b in 
ERROR_CLASSES):\n out('-------------------------------\\n'\n 'Not proceeding to installation.\\n')\n return SOMETHING_WENT_WRONG\n else:\n for req in buckets[InstallableReq]:\n req.install()\n\n first_every_last(buckets[SatisfiedReq], *printers)\n\n return ITS_FINE_ITS_FINE\n except (UnsupportedRequirementError, DownloadError) as exc:\n out(str(exc))\n return SOMETHING_WENT_WRONG\n finally:\n for req in reqs:\n req.dispose()\n print(''.join(output))", "def install_dependencies(self):\n return False", "def update_requirements():\n\n with virtualenv(VIRTUALENV_PATH):\n cmd = ['pip install']\n cmd += ['--requirement %s' % os.path.join(CODE_DIR,'requirements.txt')]\n run(' '.join(cmd))", "def activate(specifier):\n try:\n for distro in require(specifier):\n distro.activate()\n except (VersionConflict, DistributionNotFound):\n raise RuntimeError('The installed version of pip is too old; peep '\n 'requires ' + specifier)", "def installRequiredPackages(self, force=False):\n # Need to install if forced or any packages cannot be imported\n needToInstall = force\n if not needToInstall:\n try:\n import jupyter\n import jupyterlab\n import ipywidgets\n import pandas\n import ipyevents\n import ipycanvas\n except:\n needToInstall = True\n\n if needToInstall:\n # Install required packages\n import os\n if os.name != 'nt':\n # PIL may be corrupted on linux, reinstall from pillow\n slicer.util.pip_install('--upgrade pillow --force-reinstall')\n\n slicer.util.pip_install(\"jupyter jupyterlab ipywidgets pandas ipyevents ipycanvas --no-warn-script-location\")\n\n # Install Slicer Jupyter kernel\n # Create Slicer kernel\n slicer.modules.jupyterkernel.updateKernelSpec()\n # Install Slicer kernel\n import jupyter_client\n jupyter_client.kernelspec.KernelSpecManager().install_kernel_spec(slicer.modules.jupyterkernel.kernelSpecPath(), user=True, replace=True)", "def _resolve_libraries(self):\n # Merge any custom libs needed by rules, etc\n libs_to_install = self.REQUIRED_LIBS.union(\n set(self.config['global']['general'].get('third_party_libraries', []))\n )\n\n LOGGER.info('Installing libraries: %s', ', '.join(libs_to_install))\n pip_command = ['pip', 'install']\n pip_command.extend(libs_to_install)\n pip_command.extend(['--no-cache-dir', '--upgrade', '--target', self.temp_package_path])\n\n # Return True if the pip command is successfully run\n return run_command(pip_command, cwd=self.temp_package_path, quiet=True)", "def requires(*requirements, **kwargs):\n if '/.tox/' in sys.executable:\n venv = os.path.dirname(os.path.dirname(sys.executable))\n elif env.virtual_env: # pragma: no cover\n venv = env.chut_virtualenv = env.virtual_env\n else: # pragma: no cover\n venv = os.path.expanduser(kwargs.get('venv', '~/.chut/venv'))\n if not env.pip_download_cache: # pragma: no cover\n env.pip_download_cache = os.path.expanduser('~/.chut/cache')\n sh.mkdir('-p', env.pip_download_cache)\n bin_dir = os.path.join(venv, 'bin')\n if bin_dir not in env.path: # pragma: no cover\n env.path = [bin_dir] + env.path\n requirements = list(requirements)\n if 'chut' not in requirements:\n requirements.insert(0, 'chut')\n if not test.d(venv): # pragma: no cover\n import urllib\n url = 'https://raw.github.com/pypa/virtualenv/master/virtualenv.py'\n urllib.urlretrieve(url, '/tmp/_virtualenv.py')\n sh[sys.executable]('-S /tmp/_virtualenv.py', venv) > 1\n sh.rm('/tmp/_virtualenv*', shell=True)\n info('Installing %s...' 
% ', '.join(requirements))\n sh.pip('install -qM', *requirements) > 1\n elif env.chut_virtualenv:\n upgrade = '--upgrade' in sys.argv\n if (env.chut_upgrade or upgrade): # pragma: no cover\n installed = ''\n else:\n installed = str(sh.pip('freeze')).lower()\n requirements = [r for r in requirements if r.lower() not in installed]\n if requirements: # pragma: no cover\n info('Updating %s...' % ', '.join(requirements))\n sh.pip('install -qM --upgrade', *requirements) > 1\n executable = os.path.join(bin_dir, 'python')\n if not env.chut_virtualenv: # pragma: no cover\n env.chut_virtualenv = venv\n os.execve(executable, [executable] + sys.argv, env)", "def install(env, requirements, args, quiet=False):\n if os.path.isfile(requirements):\n args += ('-r', requirements)\n label = 'project'\n else:\n args += ('-U', '-e', '.')\n label = 'library'\n\n if not quiet:\n print('== Step 2. Install {0} =='.format(label))\n\n pip_cmd(env, ('install', ) + args, echo=not quiet)\n\n if not quiet:\n print()\n\n return True", "def install_from_repository(self) -> None:\n self.sort_packages()\n\n # Install recommended packages\n if self.recommended_packages:\n self.list_packages(self.recommended_packages, title=\"package\")\n for package in self.recommended_packages:\n try:\n self.perform_operation(\n Command('install'),\n Command(package)\n )\n except tmt.utils.RunError as error:\n self.debug(f\"Package installation failed: {error}\")\n self.warn(f\"Unable to install recommended package '{package}'.\")\n continue\n\n # Install required packages\n if self.required_packages:\n self.perform_operation(\n Command('install'),\n self.list_packages(self.required_packages, title=\"package\")\n )", "def install_deps():\n pipenv_dev = run('pipenv install --dev'.split(), check=True)\n print('Installed dependencies and virtual environment. Type `pipenv shell` to activate later.')", "def run_pip(initial_args):\n status_code = pip.main(initial_args)\n\n # Clear out the registrations in the pip \"logger\" singleton. Otherwise,\n # loggers keep getting appended to it with every run. Pip assumes only one\n # command invocation will happen per interpreter lifetime.\n logger.consumers = []\n\n if status_code:\n raise PipException(status_code)" ]
[ "0.7695657", "0.74213576", "0.7209003", "0.7197479", "0.7096822", "0.7030998", "0.69859535", "0.6862794", "0.68612194", "0.6831305", "0.6793315", "0.6789067", "0.6607342", "0.6605917", "0.65682715", "0.65639573", "0.6522661", "0.64693004", "0.64424676", "0.6357721", "0.63213474", "0.6316437", "0.63062376", "0.62823737", "0.62380123", "0.6219111", "0.6211868", "0.6110536", "0.6106591", "0.61049366", "0.61035365", "0.6098832", "0.6090937", "0.6070086", "0.6064883", "0.60602254", "0.6051671", "0.6042284", "0.6015993", "0.6010365", "0.59926385", "0.59755236", "0.5938876", "0.59380686", "0.5928046", "0.5921587", "0.5918503", "0.5880606", "0.5873628", "0.5865994", "0.58641976", "0.58485913", "0.5848122", "0.58438283", "0.5838862", "0.58341634", "0.5824719", "0.5809283", "0.58089864", "0.5785634", "0.5776847", "0.5768552", "0.5755232", "0.57487", "0.5742615", "0.5741111", "0.5737931", "0.57106847", "0.57105446", "0.57080024", "0.5702704", "0.5702115", "0.56915325", "0.5665726", "0.5658014", "0.5649019", "0.5649003", "0.56478167", "0.56462836", "0.5631903", "0.56315184", "0.5623989", "0.5623088", "0.5610313", "0.5601975", "0.5580156", "0.5575954", "0.5568938", "0.5562341", "0.55529237", "0.5551806", "0.55510914", "0.55461955", "0.5545998", "0.55438066", "0.55391914", "0.55311877", "0.55268544", "0.55163664", "0.55041146" ]
0.7709263
0
Creates a new git repo on the server (do not include the .git ending in git_repo_name)
def create_prod_git_repo(git_repo_name):\n with cd(git_dir):\n run("git init --bare %s.git && cd %s.git && git config http.receivepack true" % (git_repo_name, git_repo_name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_storer_git_repo():\n # first make teh destination directory\n rel_repo_path = vmcheckerpaths.repository\n abs_repo_path = vmcheckerpaths.abspath(rel_repo_path)\n _mkdir_if_not_exist(abs_repo_path)\n\n # then, if missing, initialize a git repo in it.\n repo_path_git = os.path.join(abs_repo_path, '.git')\n if not(os.path.isdir(repo_path_git)):\n # no git repo found in the dir.\n try:\n env = os.environ\n env['GIT_DIR'] = repo_path_git\n check_call(['git', 'init'], env=env)\n except CalledProcessError:\n logging.error('cannot create git repo in %s' % repo_path_git)", "def cmd_create(self):\n self.repo.create()\n\n # Add .gitignore.\n self.repo.add_files({'.gitignore': '.swp\\n'}, FIRST_COMMIT_MSG)\n\n # Create the etc and timestamps branches.\n self.repo.checkout('etc', create=True)\n self.repo.checkout('timestamps', create=True)\n\n self.repo.checkout('master')\n self.repo.init()\n self.update_repository()\n print('Git repository created at %s' % self.repodir)", "def command_new_repo(self):\n repoinit.new_repo(*self.args())", "def _create_github_repo(self):\n\n repo_dir = join(self.temp_dir, 'repo')\n subprocess.check_output(['git', 'init', repo_dir])\n\n subprocess.check_output(\n ['git', 'config', 'user.email', os.environ['GIT_EMAIL']],\n cwd=repo_dir\n )\n subprocess.check_output(\n ['git', 'config', 'user.name', os.environ['GIT_NAME']],\n cwd=repo_dir\n )\n\n content = statiki.get_travis_files_content(TEST_REPO, 'BOGUS', {})\n\n for info in content:\n path = join(repo_dir, info['name'])\n with open(path, 'w') as f:\n f.write(info['content'])\n\n subprocess.check_output(['git', 'add', path], cwd=repo_dir)\n subprocess.check_output(\n ['git', 'commit', '-m', '%s' % info['message']], cwd=repo_dir\n )\n\n subprocess.check_output(\n shlex.split('git remote add origin ..'), cwd=repo_dir\n )\n\n return repo_dir", "def create_repository(cfg):\n if os.path.isdir(cfg[\"repo_dir\"]):\n shutil.rmtree(cfg[\"repo_dir\"], ignore_errors=True)\n return Repo.init(cfg[\"repo_dir\"])", "def create_update_gitdir():\n if not os.path.exists(gitdname):\n retcode = subprocess.call('git clone '+repo, shell=True)\n if retcode != 0:\n msg = \"\"\"There was a problem cloning the repo\"\"\"\n raise Exception(msg)\n else: # directory exists, can't pull if you're not on a branch\n # just delete it and clone again. 
Lazy but clean solution.\n shutil.rmtree(gitdname)\n create_update_gitdir()", "def create_code_repository(CodeRepositoryName=None, GitConfig=None):\n pass", "def _clone_gitrepo():\n # Puts git repo in ~/.ssh/config to avoid interaction due to missing known_hosts\n git_server = urllib.splituser(urllib.splittype(env.project['git_repo'])[0])[1]\n if not files.exists('~/.ssh/config') or not files.contains('~/.ssh/config', git_server):\n files.append('~/.ssh/config', ['host %s' % git_server, ' StrictHostKeyChecking no'])\n\n branch = env.project.get('git_branch', 'master')\n if files.exists(_interpolate(DJANGO_PROJECT_DIR)):\n print _interpolate('project %(project)s already exists, updating')\n remote('git pull origin %s' % branch)\n else:\n with cd(_interpolate(VIRTUALENV_DIR)):\n run(_interpolate('git clone %(git_repo)s %(project)s'))\n if branch != 'master':\n remote('git fetch origin %s:%s' % (branch, branch))\n remote('git checkout %s' % branch)", "def create_repo(self, repo):\n return self.user_con.create_repo(repo=repo)", "def create_from_git(self, token: Any, repo: str):\n params = [token, repo, ]\n method = \"ProjectAPI.CreateFromGit\"\n self.__add_request(method, params, lambda payload: Definition.from_json(payload))", "def create_repository(organization_name, repository_name, \n template_repository=None, travis_ci=True):\n\n # Create a GitHub repository.\n github_client = GitHub(os.environ.get(\"GITHUB_TOKEN\"))\n\n organization = github_client.get_organization(organization_name)\n new_repository = organization.create_repo(repository_name)\n new_repository_uri = \"/\".join([organization_name, repository_name])\n\n # Enable continuous integration.\n if travis_ci:\n enable_continuous_integration(new_repository_uri)\n\n # Copy from a template.\n if template_repository: \n template = github_client.get_repo(template_repository)\n\n temp_folder = mkdtemp()\n subprocess.Popen(\n [\"git\", \"clone\", template.clone_url], cwd=temp_folder).wait()\n\n # Remove .git directory, create new one, add files, commit and push\n commands = [\n \"rm -Rf .git/\",\n \"git init\",\n \"git add -f -A\",\n \"git remote add origin [email protected]:{uri}.git\"\\\n .format(uri=new_repository_uri),\n (\"git\", \"commit\", \"-m\", \"Initial commit using {} template\"\\\n .format(template_repository)),\n \"git push -u origin master\"\n ]\n\n cwd = glob(os.path.join(temp_folder, \"*\"))[0]\n for command in commands:\n args = command.split() if isinstance(command, str) else command\n subprocess.Popen(args, cwd=cwd).wait()\n\n return new_repository", "def create_bare_repo(self, domain):\n\n domain_dir = self.get_domaindir(domain)\n www_dir = domain_dir + \"/www\"\n www_git = domain_dir + \"/www.git\"\n hook_post_receive_file = www_git + \"/hooks/post-receive\"\n\n if not os.path.exists(www_git):\n os.makedirs(www_git)\n git_init_command = \"cd \" + www_git\n git_init_command += \" && git init --bare\"\n subprocess.call(git_init_command, shell=True)\n\n if not os.path.isfile(hook_post_receive_file):\n with open(hook_post_receive_file, \"w\") as file:\n post_receive_content = \"#!/bin/sh\"\n post_receive_content += \"\\nGIT_WORK_TREE=\" + www_dir\n post_receive_content += \" git checkout -f\"\n file.write(post_receive_content)\n subprocess.call(\"chmod +x \" + hook_post_receive_file, shell=True)", "def api_repo_create():\n form = NewRepoForm()\n if form.validate_on_submit():\n # On the miniscule chance we generate a non-unique access key, loop and try again.\n success = False\n while not success:\n new_repo = 
Repo.create(\n pass_phrase = form.pass_phrase.data,\n title = form.title.data,\n description = form.description.data,\n is_private = form.is_private.data\n )\n db.session.add(new_repo)\n try:\n db.session.commit()\n success = True\n except:\n db.session.rollback()\n success = False\n session['working_repo'] = new_repo.access_key\n return jsonify(message='success', created=new_repo.access_key)\n else:\n return jsonify(message=\"failed\", errors=form.errors_to_json()), 400", "def create_repo_cli(api_client, url, provider, path):\n content = ReposApi(api_client).create(url, provider, path)\n click.echo(pretty_format(content))", "def create_clowder_repo(self, url, branch, depth=0):\n\n if self.existing_git_repository(self.repo_path):\n return\n self._init_repo()\n self._create_remote(self.remote, url, remove_dir=True)\n self._checkout_new_repo_branch(branch, depth)", "def newrepo():\n form = AddRepoForm()\n if form.validate_on_submit():\n\n # make the directory for this package\n os.mkdir(DATA + form.name.data)\n\n flash('Repo created successfully')\n\n # redirect to the login page\n return redirect(url_for('home.dashboard'))\n\n # load registration template\n return render_template('home/add.html', form=form, title='Local Repo', target=\"add\")", "def new_repo(req, source, psp_dir, url_helper=None):\n req.content_type = 'text/html'\n repo_dir = req.filename.rsplit('/', 1)[0]\n files = [f for f in os.listdir(repo_dir) if f[-3:] == '.h5']\n top_level = psp.PSP(req, filename=psp_dir+'new_repo.psp')\n top_level.run({'context': req.uri,\n 'files': files})", "def repo_new(request):\n if request.method != 'POST':\n form = RepoForm()\n return respond(request, 'repo_new.html', {'form': form})\n form = RepoForm(request.POST)\n errors = form.errors\n if not errors:\n try:\n repo = models.Repository(\n name=form.cleaned_data.get('name'),\n url=form.cleaned_data.get('url'),\n guid=form.cleaned_data.get('guid'),\n )\n except (db.BadValueError, ValueError) as err:\n errors['__all__'] = unicode(err)\n if errors:\n return respond(request, 'repo_new.html', {'form': form})\n repo.put()\n branch_url = repo.url\n if not branch_url.endswith('/'):\n branch_url += '/'\n branch_url += 'trunk/'\n branch = models.Branch(repo_key=repo.key, repo_name=repo.name,\n category='*trunk*', name='Trunk',\n url=branch_url)\n branch.put()\n return HttpResponseRedirect(reverse(repos))", "def create_repo_clone(self, path, https):\n _, _, login, remote_dir = path.split('/', 3) # 3 x '/' before real path\n remote_dir = os.path.dirname(remote_dir) # final segment from clone\n print remote_dir\n cmd = ['ssh', login, 'mkdir', '-p', remote_dir]\n print cmd\n check_output(cmd)\n cmd = ['ssh', login, 'cd', remote_dir, ';', 'hg', 'clone', https]\n #cmd = ['ssh', login, 'cd {} ; hg clone {}'.format(remote_dir, path.replace('ssh:', 'https:'))]\n print cmd\n check_output(cmd)", "def git_project(soup, github_user, github_pass, github_repo, github_name):\n giturl = 'https://{user}:{password}@github.com/{user}/{repo}.git'.format(\n user=github_user, password=github_pass, repo=github_repo\n )\n oldcwd = os.getcwd()\n tmpdir = tempfile.mkdtemp()\n gitdir = os.path.join(tmpdir, github_repo)\n cmd = 'git clone {} {}'.format(shlex.quote(giturl), shlex.quote(gitdir))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(gitdir)\n rhinoscrape(soup, github_user, github_name)\n cmd = 'git add .'\n subprocess.run(shlex.split(cmd), check=False)\n msg = 'Project committed by Rhino Repo'\n cmd = 'git commit -m {}'.format(shlex.quote(msg))\n 
subprocess.run(shlex.split(cmd), check=False)\n cmd = 'git push {}'.format(shlex.quote(giturl))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(oldcwd)\n shutil.rmtree(tmpdir, ignore_errors=True)", "def create(self):\n if os.path.isdir(self.repodir):\n if os.listdir(self.repodir):\n raise EmtError('%s is not empty' % self.repodir)\n else:\n os.makedirs(self.repodir)\n self.git_cmd('init')\n self.initialized = True", "async def create_from_git(self, token: Any, repo: str) -> Definition:\n response = await self._invoke({\n \"jsonrpc\": \"2.0\",\n \"method\": \"ProjectAPI.CreateFromGit\",\n \"id\": self.__next_id(),\n \"params\": [token, repo, ]\n })\n assert response.status // 100 == 2, str(response.status) + \" \" + str(response.reason)\n payload = await response.json()\n if 'error' in payload:\n raise ProjectAPIError.from_json('create_from_git', payload['error'])\n return Definition.from_json(payload['result'])", "def create_remote_repo(self, auth_token):\n github = Github(auth_token)\n user = github.get_user()\n try:\n return user.create_repo(self.repo)\n except GithubException as e:\n raise PermissionDenied(\n (e._GithubException__data['message'] +\n e._GithubException__data['errors'][0]['message']))", "def init_git_repo(c, repo_name, org_name='kinecosystem', remote='origin', branch='master'):\n # clone git repo if it doesn't exist,\n # otherwise checkout master branch\n dir_name = '{}-git'.format(repo_name)\n git_url = 'https://github.com/{}/{}.git'.format(org_name, repo_name)\n\n if not os.path.isdir('{}/{}/volumes/{}'.format(os.getcwd(), c.cwd, dir_name)):\n print('%s git repository doesn\\'t exist, cloning' % repo_name)\n c.run('git clone --branch {branch} {git_url} volumes/{dir_name}'.format(branch=branch, git_url=git_url, dir_name=dir_name))\n else:\n with c.cd('volumes/{}'.format(dir_name)):\n if is_git_dir_modified(c):\n raise Exit('Stopping, please clean changes and retry')\n\n git_dir_checkout_branch(c, org_name, repo_name, remote, branch)\n\n return dir_name", "def mkdir ():\n name = \"-\".join(parser_arguments().classes)\n if not os.path.exists(name):\n os.mkdir(name)\n print('The repository {} have been created'.format(parser_arguments().classes))\n else:\n print('The repository {} already exists.'.format(parser_arguments().classes))\n pass", "def _make_github_repo(github_login, entity, reponame, existing,\n access_protocol, private, dryrun):\n repo = None\n access_url = None\n try:\n repo = entity.get_repo(reponame)\n access_url = get_repo_url(repo, access_protocol, github_login)\n except gh.GithubException as e:\n if e.status != 404:\n # this is not a not found message, raise\n raise e\n lgr.debug(\n 'To be created repository \"%s\" does not yet exist on Github',\n reponame)\n\n if repo is not None:\n res = dict(\n url=access_url,\n preexisted=True,\n )\n if existing in ('skip', 'reconfigure'):\n return dict(\n res,\n status='notneeded',\n preexisted=existing == 'skip',\n )\n elif existing == 'error':\n return dict(\n res,\n status='error',\n message=('repository \"%s\" already exists on Github', reponame),\n )\n elif existing == 'replace':\n _msg = ('repository \"%s\" already exists on GitHub.', reponame)\n # Since we are running in the loop trying different tokens,\n # this message might appear twice. 
TODO: avoid\n if ui.is_interactive:\n remove = ui.yesno(\n \"Do you really want to remove it?\",\n title=_msg[0] % _msg[1],\n default=False\n )\n else:\n return dict(\n res,\n status='impossible',\n message=(\n _msg[0] + \" Remove it manually first on GitHub or \"\n \"rerun datalad in an interactive shell to confirm \"\n \"this action.\",\n _msg[1]),\n )\n if not remove:\n return dict(\n res,\n status='impossible',\n message=_msg,\n )\n repo.delete()\n repo = None\n else:\n RuntimeError('must not happen')\n\n if repo is None and not dryrun:\n try:\n repo = entity.create_repo(\n reponame,\n # TODO description='',\n # TODO homepage='',\n private=private,\n has_issues=False,\n has_wiki=False,\n has_downloads=False,\n auto_init=False)\n except gh.GithubException as e:\n if e.status == 404:\n # can happen if credentials are not good enough!\n raise\n msg = \"Github {} ({})\".format(\n e.data.get('message', str(e) or 'unknown'),\n e.data.get('documentation_url', 'no url')\n )\n if e.data.get('errors'):\n msg += ': {}'.format(\n ', '.join(\n [\n err.get('message')\n for err in e.data.get('errors', [])\n if 'message' in err\n ]))\n return dict(\n res,\n status='error',\n message=msg,\n )\n\n if repo is None and not dryrun:\n raise RuntimeError(\n 'something went wrong, we got no Github repository')\n\n # get definitive URL:\n # - use previously determined one\n # - or query a newly created project\n # - or craft one in dryrun mode\n access_url = access_url or '{}github.com{}{}/{}.git'.format(\n 'https://' if access_protocol == 'https' else 'git@',\n '/' if access_protocol == 'https' else ':',\n # this will be the org, in case the repo will go under an org\n entity.login,\n reponame,\n ) if dryrun else get_repo_url(repo, access_protocol, github_login)\n\n return dict(\n status='ok',\n url=access_url,\n preexisted=False,\n )", "def repository_create_hosted():\n pass", "def clone_into_project(git_repo_name):\n repo_dir = git_dir + \"/%s.git\" % git_repo_name\n with cd(remote_dir):\n run('rm -rf myproject')\n run(\"git clone %s %s\" % (repo_dir, project_name))\n run(\"echo 'MY_ENV=\\\"prod\\\"' > %s/%s/site_settings.py\" % (project_name,project_name))\n update_conf_file()", "def pushrepo(projectjson, repourl):\n try:\n components = projectjson['components']\n name = projectjson['name']\n reponame = name + '_sc'\n logger.debug(f\"repourl is : {repourl}\")\n bb_split = repourl.split(\"//\")\n bb_split[1] = f\"{username}:{escape_password}@\"+bb_split[1]\n newrepourl = \"//\".join(bb_split)\n local_code_setup(reponame, newrepourl)\n dst_makefile_path = f\"/tmp/{reponame}/Makefile\"\n if not os.path.exists(dst_makefile_path):\n src_makefile_path = f\"/tmp/skeleton-build/Makefile\"\n copy2(src_makefile_path, dst_makefile_path)\n print(\"Makefile added\")\n createcomponents(components, reponame, newrepourl, name)\n bitbucket.push_repo_to_bitbucket(f\"/tmp/{reponame}\")\n rmtree('/tmp/skeleton-build')\n rmtree(f'/tmp/{reponame}')\n return True\n except Exception as e:\n print(\"caught exception.: \", e)\n return False", "def test_add_repo(self):\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.NO_DIR):\r\n git_import.add_repo(self.TEST_REPO, None, None)\r\n\r\n os.mkdir(self.GIT_REPO_DIR)\r\n self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)\r\n\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.URL_BAD):\r\n git_import.add_repo('foo', None, None)\r\n\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):\r\n git_import.add_repo('file:///foobar.git', None, 
None)\r\n\r\n # Test git repo that exists, but is \"broken\"\r\n bare_repo = os.path.abspath('{0}/{1}'.format(settings.TEST_ROOT, 'bare.git'))\r\n os.mkdir(bare_repo)\r\n self.addCleanup(shutil.rmtree, bare_repo)\r\n subprocess.check_output(['git', '--bare', 'init', ], stderr=subprocess.STDOUT,\r\n cwd=bare_repo)\r\n\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):\r\n git_import.add_repo('file://{0}'.format(bare_repo), None, None)", "def create_remote_gitlab_repo(repository: Repository, username: str, visibility: str,\n access_token: Optional[str] = None) -> None:\n\n default_remote = repository.client_config.config['git']['default_remote']\n admin_service = None\n for remote in repository.client_config.config['git']['remotes']:\n if default_remote == remote:\n admin_service = repository.client_config.config['git']['remotes'][remote]['admin_service']\n break\n\n if not admin_service:\n raise ValueError('admin_service could not be found')\n\n try:\n # Add collaborator to remote service\n mgr = GitLabManager(default_remote, admin_service,\n access_token=access_token or 'invalid')\n mgr.configure_git_credentials(default_remote, username)\n mgr.create_labbook(namespace=InventoryManager().query_owner(repository),\n labbook_name=repository.name,\n visibility=visibility)\n repository.add_remote(\"origin\", f\"https://{default_remote}/{username}/{repository.name}.git\")\n except Exception as e:\n raise GitLabRemoteError(e)", "def createproject(project_name):\n app_clone_script = 'git clone https://github.com/jaarce/falcon-bp.git %s' % project_name\n subprocess.call(app_clone_script.split(' '))", "def clone_repo():\n with settings(warn_only=True):\n run('git clone %(repository_url)s %(repo_path)s' % env)", "def register_git_repository(args, namespace, notifier=None):\n\n tempdir = tempfile.mkdtemp()\n subprocess.check_call(\n \"\"\"\n cd {} &&\n git clone {} user_code\n \"\"\".format(tempdir, args.git_repository), shell=True)\n return register(Service, args, namespace,\n os.path.join(tempdir, 'user_code'), notifier)", "def git_repo(tmp_path: Path) -> git.Repo:\n repo_dir = tmp_path.joinpath(\"repo\")\n shutil.copytree(\n src=f\"{DATA_DIR}/patches/previous/\",\n dst=str(repo_dir),\n )\n repo = git.Repo.init(repo_dir)\n repo.git.add(repo.working_tree_dir)\n repo.git.commit(\"-mInitial patches\")\n shutil.copytree(\n src=f\"{DATA_DIR}/patches/regenerated/\",\n dst=repo.working_tree_dir,\n dirs_exist_ok=True,\n )\n return repo", "def cmd_apps__create(args):\n \n if args.name is None:\n args.name = os.path.basename(os.getcwd())\n\n url = remote.create_project(args.name)\n \n if in_git_repo():\n if get_push_url('tinyserv') is None:\n git(None, 'remote', 'add', 'tinyserv', url)\n print \"Added remote 'tinyserv'.\"\n else:\n print \"This repository is already configured for app '%s'.\" % \\\n _get_current_project_name()\n \n print \"Remote repository URL is %s.\" % url", "def register_repo_create(self, body):\n httpretty.register_uri(\n httpretty.POST,\n '{url}orgs/{org}/repos'.format(\n url=self.URL,\n org=self.ORG,\n ),\n body=body\n )", "def _create_repo(\n self,\n repo_id: str,\n private: Optional[bool] = None,\n token: Optional[Union[bool, str]] = None,\n repo_url: Optional[str] = None,\n organization: Optional[str] = None,\n ) -> str:\n if repo_url is not None:\n warnings.warn(\n \"The `repo_url` argument is deprecated and will be removed in v5 of Transformers. 
Use `repo_id` \"\n \"instead.\"\n )\n if repo_id is not None:\n raise ValueError(\n \"`repo_id` and `repo_url` are both specified. Please set only the argument `repo_id`.\"\n )\n repo_id = repo_url.replace(f\"{HUGGINGFACE_CO_RESOLVE_ENDPOINT}/\", \"\")\n if organization is not None:\n warnings.warn(\n \"The `organization` argument is deprecated and will be removed in v5 of Transformers. Set your \"\n \"organization directly in the `repo_id` passed instead (`repo_id={organization}/{model_id}`).\"\n )\n if not repo_id.startswith(organization):\n if \"/\" in repo_id:\n repo_id = repo_id.split(\"/\")[-1]\n repo_id = f\"{organization}/{repo_id}\"\n\n url = create_repo(repo_id=repo_id, token=token, private=private, exist_ok=True)\n return url.repo_id", "def cli(ctx, repo_home):\n # Create a repo object and remember it as as the context object.\n ctx.obj = Repo(os.path.abspath(repo_home))", "def clone_repo(parent_repo, new_repo, org, dir=None):\n if dir:\n os.chdir(dir)\n run(\n f\"git clone --origin {parent_repo} [email protected]:{org}/{parent_repo}.git {new_repo}\",\n \"Clone an existing remote repository to the new name locally.\",\n )\n os.chdir(new_repo)\n run(\n f\"git remote set-url --push {parent_repo} no_push\",\n \"Disable pushing to the upstream (parent) repository.\",\n )\n run(\n f\"git remote add origin [email protected]:{org}/{new_repo}.git\",\n \"Add a new remote origin for the this repository.\",\n )\n run(\"git tag -d $(git tag -l)\", f\"Delete all local git tags from {parent_repo}\")\n run(\n rf\"find . \\( ! -regex '.*/\\.git/.*' \\) -type f -exec \"\n rf\"perl -pi -e s/{parent_repo}/{new_repo}/g {{}} \\;\",\n \"Search and replace repository name in source files.\",\n )\n lineage = {\n \"version\": LINEAGE_CONFIG_VERSION,\n \"lineage\": {\n \"skeleton\": {\"remote-url\": f\"https://github.com/{org}/{parent_repo}.git\"}\n },\n }\n with LINEAGE_CONFIG.open(\"w\") as f:\n yaml.dump(lineage, stream=f, explicit_start=True)\n run(\"git add --verbose .\", \"Stage modified files.\")\n run(\n 'git commit --message \"Rename repository references after clone.\"',\n \"Commit staged files to the new repository.\",\n )\n print(\"―\" * 80)\n print(\n f\"\"\"\nThe repository \"{parent_repo}\" has been cloned and renamed to \"{new_repo}\".\nUse the following commands to push the new repository to github:\n cd {os.path.join(dir, new_repo) if dir else new_repo}\n git push --set-upstream origin develop\n \"\"\"\n )", "def clone_github_repo(self):\n repository_local_destination = os.path.join(MODULES_PATH, 'github', self.username, self.repository_name)\n if not os.path.exists(repository_local_destination):\n Repo.clone_from(self.repo_url, repository_local_destination, branch='master')\n init_filename = os.path.join(repository_local_destination, '__init__.py')\n open(init_filename, 'a').close()", "def non_git_repo(init_source_repo):\n _, parent_dir, _ = init_source_repo\n\n # Create\n non_git_dir_path = create_dir(\n full_path=os.path.join(tempfile.gettempdir(), \"non-git-repo\"),\n on_conflict=\"replace\",\n )\n\n yield non_git_dir_path\n\n # Delete the non-git repo\n delete_dir(non_git_dir_path)", "def clone(connection, url, rid, vsid='6IT', start_dir='src/', vcs_token=None, error_exists=True,\n role='SOURCE', typ='GITHUB'):\n\n config = {}\n\n if start_dir:\n config['VCS_TARGET_DIR'] = start_dir\n\n if vcs_token:\n config['CLIENT_VCS_AUTH_TOKEN'] = vcs_token\n\n repo = Repository(connection, rid)\n\n try:\n repo.create(url, vsid, config=config, role=role, typ=typ)\n except 
GCTSRepoAlreadyExistsError as ex:\n if error_exists:\n raise ex\n\n _mod_log().debug(ex)\n _mod_log().info(str(ex))\n\n repo.wipe_data()\n\n if not repo.is_cloned:\n repo.clone()\n else:\n _mod_log().info('Not cloning the repository \"%s\": already performed')\n\n return repo", "def init_repo(repo_clone_url, path, version):\n # Create path for repo\n local_repo = Path(path) / version\n local_repo = local_repo.expanduser()\n \n # Initialize repository\n repo = git.Repo.clone_from(repo_clone_url, local_repo)\n return repo, local_repo", "def _mock_git_clone(self, args: List[str]) -> None:\n cloned_repo_root = args[-1]\n\n # Create \"cloned\" directory and subfolders.\n if cloned_repo_root.endswith('test-repo1'):\n self.fs.create_file(os.path.join(cloned_repo_root, 'yara', 'cloned.yara'))\n self.fs.create_file(os.path.join(cloned_repo_root, 'not_included.yara'))\n elif cloned_repo_root.endswith('test-repo2'):\n self.fs.create_file(os.path.join(cloned_repo_root, 'yara', 'cloned.yara'))\n self.fs.create_file(os.path.join(cloned_repo_root, 'yara', 'exluded_mobile.yara'))\n self.fs.create_file(os.path.join(cloned_repo_root, 'windows', 'excluded.yara'))\n elif cloned_repo_root.endswith('test-repo3'):\n self.fs.create_file(os.path.join(cloned_repo_root, 'yara', 'cloned.yara'))", "def makeRepository(self, root):\n _gitInit(root)\n return root", "def init(ctx, repository, git):\n dufl_root = ctx.obj['dufl_root']\n if os.path.exists(dufl_root):\n click.echo(\n 'Folder %s already exists, cannot initialize.' % dufl_root,\n err=True\n )\n exit(1)\n\n try:\n click.echo('Creating %s...' % dufl_root)\n os.makedirs(dufl_root, ctx.obj['create_mode'])\n\n click.echo('Initializing git repository...')\n giti = Git(git, dufl_root)\n giti.run('init')\n if repository != '':\n giti.run('remote', 'add', 'origin', repository)\n\n click.echo('Looking for remote repository...')\n repo_exists = False\n try:\n giti.run('ls-remote', repository)\n repo_exists = True\n except GitError:\n pass\n\n if repo_exists:\n click.echo('Pulling master branch of %s' % repository)\n giti.run('pull', 'origin', 'master')\n else:\n click.echo('No remote specified. You will need to add it manually when you have one.')\n\n if not os.path.exists(os.path.join(dufl_root, ctx.obj['home_subdir'])):\n click.echo('Creating home subfolder in %s' % dufl_root)\n os.makedirs(os.path.join(dufl_root, ctx.obj['home_subdir']), ctx.obj['create_mode'])\n if not os.path.exists(os.path.join(dufl_root, ctx.obj['slash_subdir'])):\n click.echo('Creating absolute subfolder in %s' % dufl_root)\n os.makedirs(os.path.join(dufl_root, ctx.obj['slash_subdir']), ctx.obj['create_mode'])\n\n if not os.path.exists(os.path.join(dufl_root, ctx.obj['settings_file'])):\n click.echo('Creating default settings file in %s' % dufl_root)\n with open(os.path.join(dufl_root, ctx.obj['settings_file']), 'w') as the_file:\n the_file.write(yaml.dump(dict(\n defaults.settings.items() + {\n 'git': git\n }.items()\n )))\n giti.run('add', os.path.join(dufl_root, ctx.obj['settings_file']))\n giti.run('commit', '-m', 'Initial settings file.')\n\n click.echo('Done!')\n except Exception as e:\n click.echo(e, err=True)\n click.echo(\n 'Failed. 
To retry, you will need to clean up by deleting the folder %s' % dufl_root,\n err=True\n )\n exit(1)", "def repo():\n name = REPO_NAME_PREFIX + randstring()\n desc = randstring()\n repo = webapi.repos.create_repo(name)\n print('[create repo] repo_id: %s' % repo.id)\n with only_update_one_repo(repo.id):\n try:\n yield repo\n finally:\n try:\n repo.delete()\n except:\n print(\"repo is deleted\")", "def create_project(opts):\n if opts['django']:\n structure.create_django_proj(opts)\n if opts['cookiecutter_template']:\n structure.create_cookiecutter(opts)\n proj_struct = structure.make_structure(opts)\n structure.create_structure(proj_struct,\n update=opts['update'] or opts['force'])\n if not opts['update'] and not repo.is_git_repo(opts['project']):\n repo.init_commit_repo(opts['project'], proj_struct)", "def cmd_init(self, _):\n log.info('initializing repository...')\n # check if a repository already exists\n if os.path.exists(self.path):\n log.warning(\"the '{}' folder already exists\".format(self.path))\n if log.ask_yesno('overwrite existing repository?', default='n'):\n shutil.rmtree(self.path)\n log.debug(\"creating folder: {}\".format(self.path))\n os.mkdir(self.path)\n else:\n return\n log.debug('initializing Git repository')\n self.git_repo = Repo.init(self.path)\n # create .gitignore to avoid tracking decrypted files\n log.debug('adding decrypted files to Git ignore list')\n with open(os.path.join(self.path, '.gitignore'), 'a') as ofile:\n ofile.write('encrypted/*.cleartext\\n')\n # create repository subfolders\n for dirpath in (self.files_path, self.enc_files_path):\n log.debug(\"creating folder: {}\".format(dirpath))\n os.mkdir(dirpath)\n log.debug(\"adding .gitkeep file\")\n with open(os.path.join(dirpath, '.gitkeep'), 'w') as _:\n pass\n log.debug('adding new files to Git')\n self.git_commit('initial commit')\n log.debug('creating new branch: {}'.format(self.hostname))\n self.git_repo.head.reference = self.git_repo.create_head(self.hostname, 'HEAD')\n assert not self.git_repo.head.is_detached\n self.git_repo.head.reset(index=True, working_tree=True)\n log.info('done')", "def init(args: argparse.Namespace) -> None:\n\tbranch = args.branch\n\turl = args.url\n\n\trepo_path = os.path.join(os.path.abspath(\".\"), \".repo\")\n\tLOGGER.info(\"Creating repo directory at %s\", repo_path)\n\tos.makedirs(repo_path, exist_ok=True)\n\t_run_git([\"clone\", \"-b\", branch, url, MANIFEST_DIRECTORY], repo_path)\n\tLOGGER.info(\"Initialized repository at %s\", repo_path)", "def init_repo(cls, repo):\n os.makedirs(os.path.join(repo, cls.name))", "def git_clone(repo_path, path):\n r = envoy.run('git clone {repo} {path}'.format(repo=repo_path, path=path))\n if r.status_code != 0 and r.std_err != '':\n return False\n return True", "def clone():\n with cd(os.path.dirname(env.proj_root.rstrip('/'))):\n run('git clone --recursive %s' % (git_repo,))", "def create(ctx, template_name, website_name):\n try:\n # Check if the destination directory already exists\n path = os.path.join(ctx.obj['BASEDIR'], website_name)\n if os.path.exists(path):\n answer = input('Do you want to delete the existing directory? 
[Y] ')\n if answer.lower() == 'y' or answer == '':\n shutil.rmtree(path)\n\n # Generate github repo string\n github_name = template_name\n if '/' not in template_name:\n github_name = 'docker-hosting/%s-template' % template_name\n \n # Try to download repository\n link = 'https://github.com/%s/archive/master.zip' % github_name\n urlretrieve(link, 'master.zip')\n\n # Unzip downloaded file to destination directory\n zip_ref = zipfile.ZipFile('master.zip', 'r')\n zip_ref.extractall(path)\n zip_ref.close()\n\n # The destination folder contains another folder named [github-repo-name]-master.\n # We need to move all files within this directory and delete it afterwards.\n repo_name = github_name.split('/')[1]\n master_dir = os.path.join(path, repo_name + '-master')\n for file in os.listdir(master_dir):\n shutil.move(os.path.join(master_dir, file), path)\n os.rmdir(os.path.join(path, repo_name + '-master'))\n\n # Now remove the file master.zip\n os.remove('master.zip')\n except PermissionError as e:\n # TODO: handle and log exceptions\n print('%s\\n%s' % (e, 'Note: Try to running this program as Administrator.'))\n except Exception as e:\n # TODO: handle and log exceptions\n print(e)", "def init_repo(self, repo_dir, create_dirs=False):\n if create_dirs:\n # recursive create non-existent dirs\n if not os.path.isdir(repo_dir):\n os.makedirs(repo_dir)\n\n self.repo = git.Repo.init(repo_dir)\n\n return self.repo", "def callback_repo_create(self, request, uri, headers, status_code=201):\n # Disabling unused-argument because this is a callback with\n # required method signature.\n # pylint: disable=unused-argument\n self.assertEqual(\n request.headers['Authorization'],\n 'token {0}'.format(self.OAUTH2_TOKEN)\n )\n repo_dict = json.loads(request.body)\n self.assertTrue(\n repo_dict['name'] in [self.TEST_REPO, self.TEST_RERUN_REPO]\n )\n self.assertEqual(repo_dict['description'], self.TEST_DESCRIPTION)\n self.assertEqual(repo_dict['private'], True)\n\n return (status_code, headers, json.dumps({'html_url': 'testing'}))", "def git_clone(repository, directory=None, separate_git_dir=None, template_dir=None, environment=None):\n clone_cmd = [\"git\", \"clone\"]\n if separate_git_dir:\n clone_cmd.append(\"--separate-git-dir\")\n clone_cmd.append(separate_git_dir)\n if template_dir:\n clone_cmd.append(\"--template={}\".format(template_dir))\n clone_cmd.append(\"--\")\n clone_cmd.append(repository)\n if directory:\n clone_cmd.append(directory)\n\n return ext.execute(clone_cmd, environment=environment)", "def create(client, args):\n\n\tdef validate_description(text):\n\t\tif len(text) == 0:\n\t\t\tprint 'Description may not be empty. Try again.'\n\t\t\treturn False\n\t\treturn True\n\n\tdef validate_name(text):\n\t\tif len(text) == 0:\n\t\t\tprint 'Name may not be empty. Try again.'\n\t\t\treturn False\n\t\tif any(char for char in text if char.isspace()):\n\t\t\tprint 'Name may not contain spaces. Try again.'\n\t\t\treturn False\n\t\t# What other characters don't belong in the name?\n\t\treturn True\n\n\tdef validate_homepage(text):\n\t\t# This is a lame excuse for validation.\n\t\tif len(text) == 0:\n\t\t\tprint 'Home page may not be empty. 
Try again.'\n\t\t\treturn False\n\t\treturn True\n\n\tname = read_user_input('Repository name', validate_name)\n\thomepage = read_user_input('Homepage', validate_homepage)\n\tdescription = read_user_input('Description', validate_description)\n\tprint client.repos.create(name, description, homepage)", "def test_create_repository(koan, assert_repo_exists):\n koan.shell('')", "def gitAdd(filename, repo_dir):\n file_path = \"%s/%s\" % (repo_dir, filename)\n git(\"add\", file_path)", "def repository_create_hosted_yum(ctx: click.Context, **kwargs):\n _create_repository(ctx, 'hosted', **kwargs)", "def post(self):\n if not request.json:\n return None, 400\n\n created_git_repository: GitRepositoryModel = self.datastore.create(document=request.json)\n return created_git_repository, 201", "def init(repo, directory=None):\n if not repo.endswith('.git'):\n repo += '.git'\n if directory is None:\n directory = '%s/git' % os.environ['HOME']\n PC.make_dir(directory)\n fullpath = '%s/%s' % (directory, repo)\n if os.path.exists(fullpath):\n error(\"INIT-ERROR>> Existing repository: '%s'\\n\" % fullpath)\n exec_cmd(\"git init --bare %s\" % fullpath, True)\n hooks = ['post-receive', 'update']\n for hook in hooks:\n path = '%s/hooks' % fullpath\n make_hook(hook, path)\n sys.stdout.write(\"INIT>> '%s' was created...\\n\" % fullpath)", "def add_prod_repo_as_origin_and_push(git_repo_name):\n local(\"\"\"echo '[remote \"origin\"]' >> .git/config\"\"\")\n local(r\"echo ' fetch = +refs/heads/*:refs/remotes/origin/*' >> .git/config\")\n local(r\"echo ' url = %s:webapps/git/repos/%s.git' >> .git/config\" % (env.hosts[0], git_repo_name))\n local(r\"git push origin master\")", "def create_new_python_project():\n\t# Create the different variables\n\tfolder_name = str(sys.argv[1])\n\tdir_name = my_project_folder + folder_name\n\tpy_file = dir_name + '/' + folder_name + '.py'\n\treadme_file = dir_name + '/' + 'README.md'\n\ttodo_file = dir_name + '/' + 'TODO.txt'\n\n\t# Create directory if it does not exist yet\n\tif not os.path.exists(dir_name):\n\t\tos.mkdir(dir_name)\n\t\tprint(\"Directory \" , dir_name , \" Created \")\n\n\t\t# Create Python file\n\t\tdata = ''\n\t\twith open(template_py, 'r') as file:\n\t\t\tdata += file.read()\n\n\t\twith open(py_file, 'w') as f:\n\t\t\tf.write(data)\n\t\t\tprint(\"Python file created\")\n\n\t\t# Create README file\n\t\tdata = ''\n\t\twith open(template_readme, 'r') as file:\n\t\t\tdata += file.read()\n\n\t\twith open(readme_file, 'w') as f:\n\t\t\tf.write(data)\n\t\t\tprint(\"Readme file created\")\n\n\t\t# Create Todo file\n\t\twith open(todo_file, 'w') as f:\n\t\t\tprint(\"TODO file created\")\n\n\t\t# Create Github repo\n\t\twith open(\".env\", \"r\") as f:\n\t\t\tdata = f.read()\n\n\t\tindex_1 = data.find('TOKEN=\"') + len('TOKEN=\"')\n\t\ttoken = data[index_1:-1]\n\t\tg = Github(token)\n\t\tuser = g.get_user()\n\t\trepo = user.create_repo(folder_name)\n\t\tprint(\"Succesfully created repository {}\".format(folder_name))\n\n\n\telse: \n\t\tprint(\"Directory \" , dir_name , \" already exists\")", "def test_heads_create_new_branch_name(repository: Repository) -> None:\n branch = repository.heads.create(\"branch\", repository.head.commit)\n assert \"branch\" == branch.name", "def _create_bare_git_repo(cache_dir):\n\n git_dir = '--git-dir=' + cache_dir\n\n if GIT.execute([git_dir, 'init', '--bare', '--quiet'], cwd=cache_dir):\n # After preparing the bare repository, remove the bare flag from the\n # configuration. 
This is to later allow interaction with this cache\n # for developers who may wish to interact with a Git file system\n # inside the build directory. During the extraction stage, we setup\n # a `.git` file with a reference to the cache directory. This should\n # allow users to invoke Git operations from inside the build\n # directory, but the Git client can complain that it is not a valid\n # repository setup. The error goes away when the configured Git\n # directory is not bare.\n if not GIT.execute([git_dir, 'config', '--unset', 'core.bare'],\n cwd=cache_dir):\n verbose('unable to remove bare configuration on repository')\n\n return True\n\n err('unable to initialize bare git repository')\n return False", "def repository_create_hosted_recipe(ctx: click.Context, **kwargs):\n _create_repository(ctx, 'hosted', **kwargs)", "def git_clone_repo(remote_url, target_dir):\n log.info('Cloning [{}] into [{}]'.format(remote_url, target_dir))\n git.Repo.clone_from(remote_url, target_dir)", "def clone_repo():\n\n with cd(env.root):\n sudo('git clone %(repo)s %(code_root)s' % env, user=env.deploy_user)", "def generate_repo(\n specs,\n repo_dir,\n base_url,\n repo_name,\n repo_format='all',\n):\n LOGGER.info('Creating repo for specs %s', ','.join(specs))\n\n if not os.path.exists(repo_dir):\n os.makedirs(repo_dir)\n\n if repo_format == 'lago':\n spec_cls = LagoSpec\n elif repo_format == 'virt-builder':\n spec_cls = VirtBuilderSpec\n else:\n spec_cls = AllSpec\n\n images_to_build = []\n for spec in specs:\n spec_obj = spec_cls.from_spec_file(spec)\n dst_path = os.path.join(repo_dir, spec_obj.name)\n images_to_build.append(\n images.get_instance(spec_obj, dst_path)\n )\n\n for image in images_to_build:\n image.build()\n\n createrepo.create_repo_from_metadata(repo_dir, repo_name, base_url)", "def git_server():\n log('Instalando git', yellow)\n sudo('apt-get -y install git')", "def sync_git_repo():\n # get the current dir of this script\n current_dir = os.path.dirname(os.path.realpath(sys.argv[0]))\n repo_path = os.path.join(current_dir,REPO_NAME)\n logging.info(\"Repository path is: \"+repo_path)\n # check to see if a repo has been init already\n try: \n repo = git.Repo(repo_path)\n logging.info(\"Git repo has already been created.\")\n except (git.exc.InvalidGitRepositoryError,git.exc.NoSuchPathError):\n logging.info(\"No git repo has been initialized for this module. 
Cloning from github.com now.\")\n repo_url = \"https://\"+REPO_USERNAME+\":\"+REPO_PERSONAL_ACCESS_TOKEN+\"@github.com/\"+REPO_USERNAME+\"/\"+REPO_NAME+\".git\"\n git.Repo.clone_from(repo_url,repo_path)\n logging.info(\"Repo cloned successfully.\")\n repo = git.Repo(repo_path)\n # now we have a valid repo created \n # pull the latest data from the repo\n origin = repo.remotes.origin\n origin.pull()\n # create the csv output dir if it does not exist\n Path(paho_csv_reports_dir).mkdir(parents=False, exist_ok=True)\n # get all csv files in this dir\n all_paho_csv_files = glob.glob(paho_csv_reports_dir+os.path.sep+\"*.csv\")\n # add all files in this dir to the repo index\n repo.index.add(all_paho_csv_files)\n logging.info(\"Added all .csv files from \"+paho_csv_reports_dir+\" to repo index.\")\n # set the commit message\n repo.index.commit(\"Automatic commit by \"+os.path.basename(__file__))\n # git push \n origin.push()\n logging.info(\"All csv files pushed to github repo successfully.\")", "def init_remote_repo(repo_dir, url, branch='master'):\n log = LOG.bind(url=url, repo_dir=repo_dir, branch=branch)\n try:\n log.debug(\"Initializing local repo\")\n repo = git.Repo.init(repo_dir)\n remote = repo.create_remote('origin', url)\n remote.fetch()\n remote.refs[branch].checkout()\n return repo\n except Exception as e:\n msg = 'Error fetching remote commits'\n log.exception(msg, exc_info=e)\n raise GitToolException(msg)", "def _git_init(repo):\n repo.mkdir()\n subprocess.run(['git', '-c', 'init.defaultBranch=main', '-C', repo, 'init'],\n check=True)", "def process_repo(vb, options):\n if not options.repo:\n return\n\n vb.add_repo(options.repo_os, options.repo_id, options.repo_name, options.repo_url,\n options.unique, options.repo_tags)", "def createDB():\n print(\"::creating db\")\n filepath = confighome+\"config\"\n\n # open config to get credentials for ssh \n with open(filepath,mode='r', encoding='utf-8') as f:\n jconfig = json.load(f)\n creds=jconfig[0]\n\n # ssh in make a directory, initialize it with 'git --bare' \n cmd=\"ssh \"+creds['db']['username']+\"@\"+creds['db']['host']\n cmd_sqrd=\" 'if ! cd swrss_database > /dev/null 2>&1 ; then mkdir swrss_database; cd swrss_database ; fi ; git init --bare ;'\"\n cmd_full=cmd+cmd_sqrd\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)\n if retval != 0:\n print(\"::error encountered. 
Make sure you have stored your remote's info in the config\")\n\n # locally clone the \"db\"\n cmd_full=\"git clone \"+creds['db']['username']+\"@\"+creds['db']['host']+\":swrss_database\"\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)", "def create_branch(self):\n os.chdir(str(self.repository_path))\n sh.git.checkout('master')\n sh.git.checkout('-b', self.branch)\n logger.debug('Branch {} created', self.branch)", "def git_clone(git_url=QMK_GIT_URL, git_branch=QMK_GIT_BRANCH):\n repo = repo_name(git_url)\n zipfile_name = repo + '.zip'\n command = ['git', 'clone', '--single-branch', '-b', git_branch, git_url, repo]\n\n try:\n check_output(command, stderr=STDOUT, universal_newlines=True)\n os.chdir(repo)\n hash = check_output(['git', 'rev-parse', 'HEAD'])\n open('version.txt', 'w').write(hash.decode('cp437') + '\\n')\n repo_cloned = True\n\n except CalledProcessError as build_error:\n repo_cloned = False\n logging.error(\"Could not clone %s: %s (returncode: %s)\" % (repo, build_error.output, build_error.returncode))\n logging.exception(build_error)\n\n os.chdir('..')\n\n if repo_cloned:\n store_source(zipfile_name, repo, 'cache')\n\n return True", "def test_heads_create_new_branch_commit(repository: Repository) -> None:\n branch = repository.heads.create(\"branch\", repository.head.commit)\n assert repository.head.commit == branch.commit", "def test_init_repo_creates_a_zen_folder(tmp_path: str) -> None:\n _ = Repo.init(tmp_path)\n repo = Repository(str(tmp_path))\n local_stack = LocalService().get_stack(\"local_stack\")\n repo.init_repo(\n repo_path=tmp_path, analytics_opt_in=False, stack=local_stack\n )\n assert os.path.exists(os.path.join(tmp_path, ZENML_DIR_NAME))", "def add_repo(repo, rdir_in, branch=None):\r\n # pylint: disable=R0915\r\n\r\n # Set defaults even if it isn't defined in settings\r\n mongo_db = {\r\n 'host': 'localhost',\r\n 'user': '',\r\n 'password': '',\r\n 'db': 'xlog',\r\n }\r\n\r\n # Allow overrides\r\n if hasattr(settings, 'MONGODB_LOG'):\r\n for config_item in ['host', 'user', 'password', 'db', ]:\r\n mongo_db[config_item] = settings.MONGODB_LOG.get(\r\n config_item, mongo_db[config_item])\r\n\r\n if not os.path.isdir(GIT_REPO_DIR):\r\n raise GitImportError(GitImportError.NO_DIR)\r\n # pull from git\r\n if not (repo.endswith('.git') or\r\n repo.startswith(('http:', 'https:', 'git:', 'file:'))):\r\n raise GitImportError(GitImportError.URL_BAD)\r\n\r\n if rdir_in:\r\n rdir = os.path.basename(rdir_in)\r\n else:\r\n rdir = repo.rsplit('/', 1)[-1].rsplit('.git', 1)[0]\r\n log.debug('rdir = {0}'.format(rdir))\r\n\r\n rdirp = '{0}/{1}'.format(GIT_REPO_DIR, rdir)\r\n if os.path.exists(rdirp):\r\n log.info('directory already exists, doing a git pull instead '\r\n 'of git clone')\r\n cmd = ['git', 'pull', ]\r\n cwd = rdirp\r\n else:\r\n cmd = ['git', 'clone', repo, ]\r\n cwd = GIT_REPO_DIR\r\n\r\n cwd = os.path.abspath(cwd)\r\n try:\r\n ret_git = cmd_log(cmd, cwd=cwd)\r\n except subprocess.CalledProcessError as ex:\r\n log.exception('Error running git pull: %r', ex.output)\r\n raise GitImportError(GitImportError.CANNOT_PULL)\r\n\r\n if branch:\r\n switch_branch(branch, rdirp)\r\n\r\n # get commit id\r\n cmd = ['git', 'log', '-1', '--format=%H', ]\r\n try:\r\n commit_id = cmd_log(cmd, cwd=rdirp)\r\n except subprocess.CalledProcessError as ex:\r\n log.exception('Unable to get git log: %r', ex.output)\r\n raise GitImportError(GitImportError.BAD_REPO)\r\n\r\n ret_git += 
'\\nCommit ID: {0}'.format(commit_id)\r\n\r\n # get branch\r\n cmd = ['git', 'symbolic-ref', '--short', 'HEAD', ]\r\n try:\r\n branch = cmd_log(cmd, cwd=rdirp)\r\n except subprocess.CalledProcessError as ex:\r\n # I can't discover a way to excercise this, but git is complex\r\n # so still logging and raising here in case.\r\n log.exception('Unable to determine branch: %r', ex.output)\r\n raise GitImportError(GitImportError.BAD_REPO)\r\n\r\n ret_git += '{0}Branch: {1}'.format(' \\n', branch)\r\n\r\n # Get XML logging logger and capture debug to parse results\r\n output = StringIO.StringIO()\r\n import_log_handler = logging.StreamHandler(output)\r\n import_log_handler.setLevel(logging.DEBUG)\r\n\r\n logger_names = ['xmodule.modulestore.xml_importer', 'git_add_course',\r\n 'xmodule.modulestore.xml', 'xmodule.seq_module', ]\r\n loggers = []\r\n\r\n for logger_name in logger_names:\r\n logger = logging.getLogger(logger_name)\r\n logger.setLevel(logging.DEBUG)\r\n logger.addHandler(import_log_handler)\r\n loggers.append(logger)\r\n\r\n try:\r\n management.call_command('import', GIT_REPO_DIR, rdir,\r\n nostatic=not GIT_IMPORT_STATIC)\r\n except CommandError:\r\n raise GitImportError(GitImportError.XML_IMPORT_FAILED)\r\n except NotImplementedError:\r\n raise GitImportError(GitImportError.UNSUPPORTED_STORE)\r\n\r\n ret_import = output.getvalue()\r\n\r\n # Remove handler hijacks\r\n for logger in loggers:\r\n logger.setLevel(logging.NOTSET)\r\n logger.removeHandler(import_log_handler)\r\n\r\n course_key = None\r\n location = 'unknown'\r\n\r\n # extract course ID from output of import-command-run and make symlink\r\n # this is needed in order for custom course scripts to work\r\n match = re.search(r'(?ms)===> IMPORTING course (\\S+)', ret_import)\r\n if match:\r\n course_id = match.group(1)\r\n try:\r\n course_key = CourseKey.from_string(course_id)\r\n except InvalidKeyError:\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)\r\n cdir = '{0}/{1}'.format(GIT_REPO_DIR, course_key.course)\r\n log.debug('Studio course dir = {0}'.format(cdir))\r\n\r\n if os.path.exists(cdir) and not os.path.islink(cdir):\r\n log.debug(' -> exists, but is not symlink')\r\n log.debug(subprocess.check_output(['ls', '-l', ],\r\n cwd=os.path.abspath(cdir)))\r\n try:\r\n os.rmdir(os.path.abspath(cdir))\r\n except OSError:\r\n log.exception('Failed to remove course directory')\r\n\r\n if not os.path.exists(cdir):\r\n log.debug(' -> creating symlink between {0} and {1}'.format(rdirp, cdir))\r\n try:\r\n os.symlink(os.path.abspath(rdirp), os.path.abspath(cdir))\r\n except OSError:\r\n log.exception('Unable to create course symlink')\r\n log.debug(subprocess.check_output(['ls', '-l', ],\r\n cwd=os.path.abspath(cdir)))\r\n\r\n # store import-command-run output in mongo\r\n mongouri = 'mongodb://{user}:{password}@{host}/{db}'.format(**mongo_db)\r\n\r\n try:\r\n if mongo_db['user'] and mongo_db['password']:\r\n mdb = mongoengine.connect(mongo_db['db'], host=mongouri)\r\n else:\r\n mdb = mongoengine.connect(mongo_db['db'], host=mongo_db['host'])\r\n except mongoengine.connection.ConnectionError:\r\n log.exception('Unable to connect to mongodb to save log, please '\r\n 'check MONGODB_LOG settings')\r\n cil = CourseImportLog(\r\n course_id=course_key,\r\n location=location,\r\n repo_dir=rdir,\r\n created=timezone.now(),\r\n import_log=ret_import,\r\n git_log=ret_git,\r\n )\r\n cil.save()\r\n\r\n log.debug('saved CourseImportLog for {0}'.format(cil.course_id))\r\n mdb.disconnect()", "def new(url):\n from grit 
import Repo\n return Repo.new(url=url, bare=True)", "def newproject():\n log('Criando novo projeto', yellow)\n log('Cria a conta no bitbucket com o nome do projeto vázio que o script se encarregará do resto', red)\n\n conta = raw_input('Digite o nome do projeto: ')\n\n local('echo \"clonando projeto %s\"' % bitbucket_repository)\n local('git clone {0} {1}{2}'.format(bitbucket_repository, folder_project_local, conta))\n local('cd {0}{1}'.format(folder_project_local, conta))\n local('mkvirtualenv {0}'.format(conta))\n local('setvirtualenvproject')\n local('pip install -r requirements.txt')\n local('rm -rf {0}{1}/.git'.format(folder_project_local, conta))\n local('rm -rf README.md')\n local('git init')\n local('git remote add origin [email protected]:{0}/{1}.git'.format(bitbucket_user, conta))", "def repository_create_hosted_maven(ctx: click.Context, **kwargs):\n _create_repository(ctx, 'hosted', **kwargs)", "def __add_repo(repo_name, url):\n\n conn = sqlite3.connect(DTF_DB)\n cur = conn.cursor()\n\n entry = [(repo_name, url)]\n\n sql = ('INSERT INTO repos (repo_name, url)'\n 'VALUES (?, ?)')\n\n cur.executemany(sql, entry)\n conn.commit()\n\n return 0", "def create_mirror(self, repo, body):\n url = self._repo_url(repo, other='/mirror')\n response = self.rest.post(url)\n\n if response.status_code is not 201:\n self.module.fail_json(msg=response.info)\n return response.info", "def clone_repo(start=0,end=100000):\n repo_list=repo_url['URLs']\n count=0\n\n for url in repo_list[start:end]:\n url=str(url)\n name=url.rsplit('/', 2) #get the repo name (last 2 part) of the repository url\n last=name[-2]+'-'+name[-1]\n try:\n if not os.path.exists(last):\n os.mkdir(last) #Make folder for a repo if it does not exist\n repo=str(url) + '.git'\n folder= r'repos'\n Repo.clone_from(repo,last)\n count+=1\n print('cloned ' , repo)\n except:\n continue\n return count", "def clone(repo, directory, branch=\"master\"):\n return CloneGitRepo.builder() \\\n .repository(repo) \\\n .branch(branch) \\\n .directory(directory) \\\n .build()", "def clone_repository(cls):\n Git(git_path).clone(cls.url + '.git')\n return True", "def git_clone(git_url, git_folder):\n run(\"git clone %s %s &> /dev/null\" % (git_url, git_folder))", "def clone_from_git() -> co.Exec:\n git_url = \"https://github.com/conducto/demo.git\"\n image = co.Image(\n dockerfile=\"./docker/Dockerfile.git\", copy_url=git_url, copy_branch=\"main\",\n )\n return co.Exec(\"python cicd/code/test.py\", image=image, doc=co.util.magic_doc())", "def _get_tmp_repo(self):\n repo_path = os.path.join(TEMP_DIR_ROOT, 'repocopy_' + slugify(self.data['repository']['name']))\n if HARD_COPY and os.path.exists(repo_path):\n shutil.rmtree(repo_path)\n elif os.path.exists(repo_path):\n return git.Repo(repo_path)\n\n os.mkdir(repo_path)\n return git.Repo.init(repo_path)", "def create_repo_dir(self):\n if os.path.exists(self.repo_path):\n self.report(f'repo dir exists: {self.repo_path}')\n status = 0\n else:\n cmd=['/usr/bin/sudo',\n '/usr/bin/install', '-d',\n '-m', '02775', \n '-o', self.user, \n '-g', str(self.gid),\n self.repo_path]\n status = self._call(cmd)\n if status == 0:\n self.report(f'created repo dir: {self.repo_path}')\n elif status is None:\n pass\n else:\n self.report(f'failed to create repo dir: {self.repo_path}')\n return status", "def test_clone_repository(koan, assert_cloned_repo_exists):\n koan.shell('')", "def fetch_repo(root, repo, url, destination_temp):\n\n print \"Fetching %s from %s\" % (repo, url)\n\n if root.exists('repos/%s' % repo):\n print \"Repo 
%s exists, issuing a git pull...\" % repo\n call('cd repos/%s; git pull' % repo, shell=True)\n else:\n print \"Repo %s does not exist, issuing a git clone...\" % repo\n\n # explicitely create dir as implicit creation fails on server\n root.makedir('%s/%s' % (destination_temp, repo))\n call('cd repos; git clone %s %s' % (url, repo), shell=True)\n # call('git clone %s %s/%s > /dev/null 2>&1' % (repo['url'], source, repo['id']), shell=True)", "def __gitCreateArchive(self):\n self.vcs.gitCreateArchive(self.project.getProjectPath())", "def git_remote(git_repo):\n github_token = os.getenv(GITHUB_TOKEN_KEY)\n if github_token:\n return 'https://{0}@github.com/{1}'.format(\n github_token, git_repo)\n return '[email protected]:{0}'.format(git_repo)", "def create_staging_repo(configs):\n print(\"Creating staging repo...\")\n\n url = configs[\"nexus\"][\"createRepoURL\"]\n content = configs[\"nexus\"][\"stagingRepoDescription\"]\n header = {\"Content-Type\": \"application/xml\"}\n basic_auth = HTTPBasicAuth(configs[\"nexus\"][\"username\"], configs[\"passwords\"][\"nexus\"])\n response = requests.post(url, data=content, headers=header, auth=basic_auth)\n\n if response.status_code == 201:\n json_response = json.loads(json.dumps(xmltodict.parse(response.text)))\n staging_repo = json_response[\"promoteResponse\"][\"data\"][\"stagedRepositoryId\"]\n print(\"--Staging repo created: \" + staging_repo)\n return staging_repo\n else:\n raise Exception(\"Failed at creating staging repo. Status code: \" +\n str(response.status_code) + \" Response content: \" + response.text)" ]
[ "0.78584546", "0.76648015", "0.7265442", "0.72380567", "0.719079", "0.7102667", "0.709434", "0.7067098", "0.7028291", "0.70168835", "0.6969751", "0.69651526", "0.6953235", "0.69130766", "0.68735963", "0.68264115", "0.67686075", "0.67527515", "0.6737532", "0.6696314", "0.6633343", "0.66185445", "0.65991133", "0.6591205", "0.6521478", "0.65140915", "0.649753", "0.64814025", "0.6477833", "0.64679915", "0.64332235", "0.64156103", "0.63652027", "0.63646686", "0.6356767", "0.6346124", "0.63436717", "0.63361543", "0.63235676", "0.6300787", "0.6300051", "0.62890035", "0.6280524", "0.6280444", "0.6274777", "0.62741905", "0.6263712", "0.6259751", "0.62443787", "0.6235297", "0.6224492", "0.62091863", "0.62037975", "0.6195635", "0.6195516", "0.6176839", "0.6167489", "0.61535823", "0.614477", "0.61162174", "0.61008155", "0.6097523", "0.6094878", "0.60837346", "0.60666376", "0.60643995", "0.60318303", "0.6031528", "0.6027208", "0.60199374", "0.5968819", "0.59594166", "0.5953125", "0.5952639", "0.59334415", "0.59231937", "0.5921615", "0.5909667", "0.59055597", "0.5899663", "0.58780354", "0.5857702", "0.58544767", "0.58270097", "0.58129275", "0.580622", "0.57918125", "0.5787788", "0.57736677", "0.5764582", "0.57629514", "0.57449687", "0.57436657", "0.573611", "0.5723258", "0.5703714", "0.57014203", "0.57001305", "0.56866676", "0.5685716" ]
0.805788
0
Adds the git repo on the server as the local .git repo's origin, and pushes master to it. (do not include the .git ending in git_repo_name)
def add_prod_repo_as_origin_and_push(git_repo_name):
    local("""echo '[remote "origin"]' >> .git/config""")
    local(r"echo ' fetch = +refs/heads/*:refs/remotes/origin/*' >> .git/config")
    local(r"echo ' url = %s:webapps/git/repos/%s.git' >> .git/config" % (env.hosts[0], git_repo_name))
    local(r"git push origin master")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def push(self):\n origin = self.git_repo.remotes.origin\n origin.push()", "def push(ctx):\n dufl_root = ctx.obj['dufl_root']\n git = Git(ctx.obj.get('git', '/usr/bin/git'), dufl_root)\n git.run('push', 'origin', git.working_branch())", "def push():\n branch = git.current_branch().name\n shell.run('git push -u origin {}'.format(branch))", "def __gitAddRemote(self):\n self.vcs.gitAddRemote(self.project.getProjectPath())", "def push(self, base_repo, branch=\"master\"):\n base_repo.push_to(self, branch)", "def _clone_gitrepo():\n # Puts git repo in ~/.ssh/config to avoid interaction due to missing known_hosts\n git_server = urllib.splituser(urllib.splittype(env.project['git_repo'])[0])[1]\n if not files.exists('~/.ssh/config') or not files.contains('~/.ssh/config', git_server):\n files.append('~/.ssh/config', ['host %s' % git_server, ' StrictHostKeyChecking no'])\n\n branch = env.project.get('git_branch', 'master')\n if files.exists(_interpolate(DJANGO_PROJECT_DIR)):\n print _interpolate('project %(project)s already exists, updating')\n remote('git pull origin %s' % branch)\n else:\n with cd(_interpolate(VIRTUALENV_DIR)):\n run(_interpolate('git clone %(git_repo)s %(project)s'))\n if branch != 'master':\n remote('git fetch origin %s:%s' % (branch, branch))\n remote('git checkout %s' % branch)", "def _sync_git_origin(cache_dir, site):\n\n git_dir = '--git-dir=' + cache_dir\n\n # silently try to add origin first, to lazily handle a missing case\n GIT.execute([git_dir, 'remote', 'add', 'origin', site],\n cwd=cache_dir, quiet=True)\n\n if not GIT.execute([git_dir, 'remote', 'set-url', 'origin', site],\n cwd=cache_dir):\n err('unable to ensure origin is set on repository cache')\n return False\n\n return True", "def push(self):\n out, err, code = self.command( [\"git\", \"push\"], self.directory )", "def gitAdd(filename, repo_dir):\n file_path = \"%s/%s\" % (repo_dir, filename)\n git(\"add\", file_path)", "def push(self):\n if self.forward:\n git = self.repo.git\n try:\n git.push()\n self.forward = \"pushed\"\n except:\n self.forward = \"push error - \"+self.forward", "def _git_push(branch):\n\n local(\n 'git push -f origin %(branch)s:%(branch)s' % {'branch': branch},\n capture=True\n )\n print('Pushed to %s' % branch)", "def create_prod_git_repo(git_repo_name):\n with cd(git_dir):\n run(\"git init --bare %s.git && cd %s.git && git config http.receivepack true\" %\n (git_repo_name,git_repo_name))", "def pushrepo(projectjson, repourl):\n try:\n components = projectjson['components']\n name = projectjson['name']\n reponame = name + '_sc'\n logger.debug(f\"repourl is : {repourl}\")\n bb_split = repourl.split(\"//\")\n bb_split[1] = f\"{username}:{escape_password}@\"+bb_split[1]\n newrepourl = \"//\".join(bb_split)\n local_code_setup(reponame, newrepourl)\n dst_makefile_path = f\"/tmp/{reponame}/Makefile\"\n if not os.path.exists(dst_makefile_path):\n src_makefile_path = f\"/tmp/skeleton-build/Makefile\"\n copy2(src_makefile_path, dst_makefile_path)\n print(\"Makefile added\")\n createcomponents(components, reponame, newrepourl, name)\n bitbucket.push_repo_to_bitbucket(f\"/tmp/{reponame}\")\n rmtree('/tmp/skeleton-build')\n rmtree(f'/tmp/{reponame}')\n return True\n except Exception as e:\n print(\"caught exception.: \", e)\n return False", "def push_code(repo, branch='gh-pages'):\n return repo.remotes.origin.push(branch)", "def push(ref='origin/master'):\n from fabric.api import local, run, cd\n from fabric.contrib.project import rsync_project\n local('pelican -s %s -d' % env.config_file)\n 
rsync_project(\n remote_dir=env.host_site_path,\n local_dir='output/',\n delete=True\n )\n if env.host_type != 'production':\n run(\"chown -R %(user)s:%(host_webserver_user)s %(host_site_path)s \"\n \"&& chmod -R 02750 %(host_site_path)s\" % env)", "def git_remote(git_repo):\n github_token = os.getenv(GITHUB_TOKEN_KEY)\n if github_token:\n return 'https://{0}@github.com/{1}'.format(\n github_token, git_repo)\n return '[email protected]:{0}'.format(git_repo)", "def sync_git_repo():\n # get the current dir of this script\n current_dir = os.path.dirname(os.path.realpath(sys.argv[0]))\n repo_path = os.path.join(current_dir,REPO_NAME)\n logging.info(\"Repository path is: \"+repo_path)\n # check to see if a repo has been init already\n try: \n repo = git.Repo(repo_path)\n logging.info(\"Git repo has already been created.\")\n except (git.exc.InvalidGitRepositoryError,git.exc.NoSuchPathError):\n logging.info(\"No git repo has been initialized for this module. Cloning from github.com now.\")\n repo_url = \"https://\"+REPO_USERNAME+\":\"+REPO_PERSONAL_ACCESS_TOKEN+\"@github.com/\"+REPO_USERNAME+\"/\"+REPO_NAME+\".git\"\n git.Repo.clone_from(repo_url,repo_path)\n logging.info(\"Repo cloned successfully.\")\n repo = git.Repo(repo_path)\n # now we have a valid repo created \n # pull the latest data from the repo\n origin = repo.remotes.origin\n origin.pull()\n # create the csv output dir if it does not exist\n Path(paho_csv_reports_dir).mkdir(parents=False, exist_ok=True)\n # get all csv files in this dir\n all_paho_csv_files = glob.glob(paho_csv_reports_dir+os.path.sep+\"*.csv\")\n # add all files in this dir to the repo index\n repo.index.add(all_paho_csv_files)\n logging.info(\"Added all .csv files from \"+paho_csv_reports_dir+\" to repo index.\")\n # set the commit message\n repo.index.commit(\"Automatic commit by \"+os.path.basename(__file__))\n # git push \n origin.push()\n logging.info(\"All csv files pushed to github repo successfully.\")", "def update_code_from_git():\n if not files.exists(REMOTE_REPO_DIR):\n with cd(HOME_DIR):\n run(\"git clone %s\" % MAIN_GITHUB_REP )\n with cd(REMOTE_REPO_DIR):\n run(\"git pull\")", "def commit_master(do_deploy=True):\n local(\"git pull origin master\")\n commit()\n local(\"git checkout master\")\n local(\"git pull origin master\")\n local(\"git merge dev\")\n local(\"git push origin master\")\n if do_deploy:\n deploy()\n deploy_config()", "def clone_into_project(git_repo_name):\n repo_dir = git_dir + \"/%s.git\" % git_repo_name\n with cd(remote_dir):\n run('rm -rf myproject')\n run(\"git clone %s %s\" % (repo_dir, project_name))\n run(\"echo 'MY_ENV=\\\"prod\\\"' > %s/%s/site_settings.py\" % (project_name,project_name))\n update_conf_file()", "def __gitPush(self):\n self.vcs.gitPush(self.project.getProjectPath())", "def deploy():\n remote_dir = os.path.abspath(os.path.join(REMOTE_BASE_DIR, REPO_NAME))\n \n with settings(warn_only=True):\n if run(\"test -d %s\" % (remote_dir)).failed:\n puts(red(\"[Repo %s does not exist on remote at: %s]\" % (REPO_NAME, remote_dir)))\n with cd(REMOTE_BASE_DIR):\n run(\"git clone %s %s\" % (REPO_URL, REPO_NAME))\n\n puts(yellow(\"[Write logs]\"))\n run(\"echo '-----------------------------' > %s\" % REMOTE_ERR_FILE)\n run(\"echo `date` >> %s\" % REMOTE_ERR_FILE)\n run(\"echo '-----------------------------' >> %s\" % REMOTE_ERR_FILE)\n run(\"echo '-----------------------------' > %s\" % REMOTE_LOG_FILE)\n run(\"echo `date` >> %s\" % REMOTE_LOG_FILE)\n run(\"echo '-----------------------------' >> %s\" % REMOTE_LOG_FILE)\n\n 
puts(yellow(\"[Update repo: %s]\" % REPO_NAME))\n with cd(remote_dir):\n run(\"git pull origin master >> %s 2>> %s\" %\n (REMOTE_LOG_FILE, REMOTE_ERR_FILE))\n\n # reminder new static files\n puts(yellow('Do not forget to run collect staticfiles on DJANGO server.'))", "def add_repo(repo, rdir_in, branch=None):\r\n # pylint: disable=R0915\r\n\r\n # Set defaults even if it isn't defined in settings\r\n mongo_db = {\r\n 'host': 'localhost',\r\n 'user': '',\r\n 'password': '',\r\n 'db': 'xlog',\r\n }\r\n\r\n # Allow overrides\r\n if hasattr(settings, 'MONGODB_LOG'):\r\n for config_item in ['host', 'user', 'password', 'db', ]:\r\n mongo_db[config_item] = settings.MONGODB_LOG.get(\r\n config_item, mongo_db[config_item])\r\n\r\n if not os.path.isdir(GIT_REPO_DIR):\r\n raise GitImportError(GitImportError.NO_DIR)\r\n # pull from git\r\n if not (repo.endswith('.git') or\r\n repo.startswith(('http:', 'https:', 'git:', 'file:'))):\r\n raise GitImportError(GitImportError.URL_BAD)\r\n\r\n if rdir_in:\r\n rdir = os.path.basename(rdir_in)\r\n else:\r\n rdir = repo.rsplit('/', 1)[-1].rsplit('.git', 1)[0]\r\n log.debug('rdir = {0}'.format(rdir))\r\n\r\n rdirp = '{0}/{1}'.format(GIT_REPO_DIR, rdir)\r\n if os.path.exists(rdirp):\r\n log.info('directory already exists, doing a git pull instead '\r\n 'of git clone')\r\n cmd = ['git', 'pull', ]\r\n cwd = rdirp\r\n else:\r\n cmd = ['git', 'clone', repo, ]\r\n cwd = GIT_REPO_DIR\r\n\r\n cwd = os.path.abspath(cwd)\r\n try:\r\n ret_git = cmd_log(cmd, cwd=cwd)\r\n except subprocess.CalledProcessError as ex:\r\n log.exception('Error running git pull: %r', ex.output)\r\n raise GitImportError(GitImportError.CANNOT_PULL)\r\n\r\n if branch:\r\n switch_branch(branch, rdirp)\r\n\r\n # get commit id\r\n cmd = ['git', 'log', '-1', '--format=%H', ]\r\n try:\r\n commit_id = cmd_log(cmd, cwd=rdirp)\r\n except subprocess.CalledProcessError as ex:\r\n log.exception('Unable to get git log: %r', ex.output)\r\n raise GitImportError(GitImportError.BAD_REPO)\r\n\r\n ret_git += '\\nCommit ID: {0}'.format(commit_id)\r\n\r\n # get branch\r\n cmd = ['git', 'symbolic-ref', '--short', 'HEAD', ]\r\n try:\r\n branch = cmd_log(cmd, cwd=rdirp)\r\n except subprocess.CalledProcessError as ex:\r\n # I can't discover a way to excercise this, but git is complex\r\n # so still logging and raising here in case.\r\n log.exception('Unable to determine branch: %r', ex.output)\r\n raise GitImportError(GitImportError.BAD_REPO)\r\n\r\n ret_git += '{0}Branch: {1}'.format(' \\n', branch)\r\n\r\n # Get XML logging logger and capture debug to parse results\r\n output = StringIO.StringIO()\r\n import_log_handler = logging.StreamHandler(output)\r\n import_log_handler.setLevel(logging.DEBUG)\r\n\r\n logger_names = ['xmodule.modulestore.xml_importer', 'git_add_course',\r\n 'xmodule.modulestore.xml', 'xmodule.seq_module', ]\r\n loggers = []\r\n\r\n for logger_name in logger_names:\r\n logger = logging.getLogger(logger_name)\r\n logger.setLevel(logging.DEBUG)\r\n logger.addHandler(import_log_handler)\r\n loggers.append(logger)\r\n\r\n try:\r\n management.call_command('import', GIT_REPO_DIR, rdir,\r\n nostatic=not GIT_IMPORT_STATIC)\r\n except CommandError:\r\n raise GitImportError(GitImportError.XML_IMPORT_FAILED)\r\n except NotImplementedError:\r\n raise GitImportError(GitImportError.UNSUPPORTED_STORE)\r\n\r\n ret_import = output.getvalue()\r\n\r\n # Remove handler hijacks\r\n for logger in loggers:\r\n logger.setLevel(logging.NOTSET)\r\n logger.removeHandler(import_log_handler)\r\n\r\n course_key = None\r\n location 
= 'unknown'\r\n\r\n # extract course ID from output of import-command-run and make symlink\r\n # this is needed in order for custom course scripts to work\r\n match = re.search(r'(?ms)===> IMPORTING course (\\S+)', ret_import)\r\n if match:\r\n course_id = match.group(1)\r\n try:\r\n course_key = CourseKey.from_string(course_id)\r\n except InvalidKeyError:\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)\r\n cdir = '{0}/{1}'.format(GIT_REPO_DIR, course_key.course)\r\n log.debug('Studio course dir = {0}'.format(cdir))\r\n\r\n if os.path.exists(cdir) and not os.path.islink(cdir):\r\n log.debug(' -> exists, but is not symlink')\r\n log.debug(subprocess.check_output(['ls', '-l', ],\r\n cwd=os.path.abspath(cdir)))\r\n try:\r\n os.rmdir(os.path.abspath(cdir))\r\n except OSError:\r\n log.exception('Failed to remove course directory')\r\n\r\n if not os.path.exists(cdir):\r\n log.debug(' -> creating symlink between {0} and {1}'.format(rdirp, cdir))\r\n try:\r\n os.symlink(os.path.abspath(rdirp), os.path.abspath(cdir))\r\n except OSError:\r\n log.exception('Unable to create course symlink')\r\n log.debug(subprocess.check_output(['ls', '-l', ],\r\n cwd=os.path.abspath(cdir)))\r\n\r\n # store import-command-run output in mongo\r\n mongouri = 'mongodb://{user}:{password}@{host}/{db}'.format(**mongo_db)\r\n\r\n try:\r\n if mongo_db['user'] and mongo_db['password']:\r\n mdb = mongoengine.connect(mongo_db['db'], host=mongouri)\r\n else:\r\n mdb = mongoengine.connect(mongo_db['db'], host=mongo_db['host'])\r\n except mongoengine.connection.ConnectionError:\r\n log.exception('Unable to connect to mongodb to save log, please '\r\n 'check MONGODB_LOG settings')\r\n cil = CourseImportLog(\r\n course_id=course_key,\r\n location=location,\r\n repo_dir=rdir,\r\n created=timezone.now(),\r\n import_log=ret_import,\r\n git_log=ret_git,\r\n )\r\n cil.save()\r\n\r\n log.debug('saved CourseImportLog for {0}'.format(cil.course_id))\r\n mdb.disconnect()", "def git_push(c):\n c.run(\"git submodule foreach git push \")", "def git_project(soup, github_user, github_pass, github_repo, github_name):\n giturl = 'https://{user}:{password}@github.com/{user}/{repo}.git'.format(\n user=github_user, password=github_pass, repo=github_repo\n )\n oldcwd = os.getcwd()\n tmpdir = tempfile.mkdtemp()\n gitdir = os.path.join(tmpdir, github_repo)\n cmd = 'git clone {} {}'.format(shlex.quote(giturl), shlex.quote(gitdir))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(gitdir)\n rhinoscrape(soup, github_user, github_name)\n cmd = 'git add .'\n subprocess.run(shlex.split(cmd), check=False)\n msg = 'Project committed by Rhino Repo'\n cmd = 'git commit -m {}'.format(shlex.quote(msg))\n subprocess.run(shlex.split(cmd), check=False)\n cmd = 'git push {}'.format(shlex.quote(giturl))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(oldcwd)\n shutil.rmtree(tmpdir, ignore_errors=True)", "def call_git_push():\n print(\"This will commit and push the git repo\")\n today = datetime.datetime.today()\n call([\"git\", \"add\", \".\"])\n call([\"git\", \"commit\", \"-m\", \"Updated notes. 
{:%Y-%m-%d %H:%M:%S}\".format(today)])\n call([\"git\", \"push\", \"origin\", \"master\"])", "def push_git(store, path):\n storedir, _ = os.path.split(path)\n cmd = [\"git\", \"add\", \".\"]\n subprocess.check_call(cmd, cwd=storedir)\n cmd = [\"git\", \"commit\", \"-m\", \"regolith auto-store commit\"]\n try:\n subprocess.check_call(cmd, cwd=storedir)\n except subprocess.CalledProcessError:\n warn(\"Could not git commit to \" + storedir, RuntimeWarning)\n return\n cmd = [\"git\", \"push\"]\n try:\n subprocess.check_call(cmd, cwd=storedir)\n except subprocess.CalledProcessError:\n warn(\"Could not git push from \" + storedir, RuntimeWarning)\n return", "def clone_github_repo(self):\n repository_local_destination = os.path.join(MODULES_PATH, 'github', self.username, self.repository_name)\n if not os.path.exists(repository_local_destination):\n Repo.clone_from(self.repo_url, repository_local_destination, branch='master')\n init_filename = os.path.join(repository_local_destination, '__init__.py')\n open(init_filename, 'a').close()", "def git_config_setup():\n\n local('git config user.email $GIT_EMAIL')\n local('git config user.name $GIT_NAME')\n\n local(\n 'git remote set-url --push origin '\n 'https://[email protected]/$TRAVIS_REPO_SLUG.git'\n )", "def push_sources():\n ensure_src_dir()\n push_rev = getattr(env, 'push_rev', None)\n if push_rev is None:\n push_rev = datetime.datetime.now().strftime(\"%Y%m%d_%H%M%S\")\n local(\"git tag -a {0} -m \\\"Tagged for release\\\"\".format(push_rev))\n local(\"git push origin master --tags\")\n\n with cd(SRC_DIR):\n run(\"git pull origin master\")\n run(\"git fetch -t\")\n run(\"git checkout {0}\".format(push_rev))", "def add_push(self, name, refspec):\n\n err = C.git_remote_add_push(self._repo._repo, to_bytes(name), to_bytes(refspec))\n check_error(err)", "def remotes():\n # heroku env remotes\n for env in ('dev', 'staging', 'production'):\n app_name = APP_INFO[env]['heroku_app_name']\n if not app_name.startswith('app-name'):\n with settings(warn_only=True): \n local(\"git remote add {} [email protected]:{}.git\".format(APP_INFO[env]['heroku_remote_name'], app_name))", "def create_storer_git_repo():\n # first make teh destination directory\n rel_repo_path = vmcheckerpaths.repository\n abs_repo_path = vmcheckerpaths.abspath(rel_repo_path)\n _mkdir_if_not_exist(abs_repo_path)\n\n # then, if missing, initialize a git repo in it.\n repo_path_git = os.path.join(abs_repo_path, '.git')\n if not(os.path.isdir(repo_path_git)):\n # no git repo found in the dir.\n try:\n env = os.environ\n env['GIT_DIR'] = repo_path_git\n check_call(['git', 'init'], env=env)\n except CalledProcessError:\n logging.error('cannot create git repo in %s' % repo_path_git)", "def create_remote_repo(self, auth_token):\n github = Github(auth_token)\n user = github.get_user()\n try:\n return user.create_repo(self.repo)\n except GithubException as e:\n raise PermissionDenied(\n (e._GithubException__data['message'] +\n e._GithubException__data['errors'][0]['message']))", "def process_repo(vb, options):\n if not options.repo:\n return\n\n vb.add_repo(options.repo_os, options.repo_id, options.repo_name, options.repo_url,\n options.unique, options.repo_tags)", "def push(self, path=None, force_push=True):\n if path is not None:\n os.chdir(path)\n\n self.flatten()\n try:\n self.commit()\n run('git', 'push', '-f', 'origin', 'master')\n except:\n self.expand()\n raise\n self.expand()\n remote = run('git', 'config', '--get', 'remote.origin.url')\n self.print_message('Pushed changes to %s' % 
remote)", "def track(self, project_name='gitlab', branch='master',\n remote_name='gitlab', no_push=False):\n project = self.get_project(project_name)\n repo = Repo('.')\n if not remote_name:\n raise GitlabException('Invalid remote name {0}'.format(remote_name))\n try:\n self.get_remote(remote_name)\n except NotFound:\n pass\n else:\n raise GitlabException('Remote name {0} already exists.'.format(remote_name))\n remote = repo.create_remote(remote_name, project.ssh_url_to_repo)\n remote.push(branch, set_upstream=True)\n return remote", "def create_from_git(self, token: Any, repo: str):\n params = [token, repo, ]\n method = \"ProjectAPI.CreateFromGit\"\n self.__add_request(method, params, lambda payload: Definition.from_json(payload))", "def push_to_github(label):\n\n # Make sure we're in the right place to do all the git things.\n os.chdir(taas.data_root())\n\n # If there's nothing to do, then do nothing.\n if (not something_to_commit()):\n print(\"Nothing to commit.\")\n return\n\n branch_name = datetime.datetime.now().strftime(\"%Y%m%d-%H%M%S\")\n\n branch_name += \"-\" + label\n\n run([\"git\", \"checkout\", \"-b\", branch_name])\n\n run([\"git\", \"add\", \"-A\"])\n\n run([\"git\", \"status\"])\n\n run([\"git\", \"commit\", \"-m\", \"Automated update: \"+label])\n\n run([\"git\", \"push\", \"--set-upstream\", \"origin\", branch_name])", "def _create_github_repo(self):\n\n repo_dir = join(self.temp_dir, 'repo')\n subprocess.check_output(['git', 'init', repo_dir])\n\n subprocess.check_output(\n ['git', 'config', 'user.email', os.environ['GIT_EMAIL']],\n cwd=repo_dir\n )\n subprocess.check_output(\n ['git', 'config', 'user.name', os.environ['GIT_NAME']],\n cwd=repo_dir\n )\n\n content = statiki.get_travis_files_content(TEST_REPO, 'BOGUS', {})\n\n for info in content:\n path = join(repo_dir, info['name'])\n with open(path, 'w') as f:\n f.write(info['content'])\n\n subprocess.check_output(['git', 'add', path], cwd=repo_dir)\n subprocess.check_output(\n ['git', 'commit', '-m', '%s' % info['message']], cwd=repo_dir\n )\n\n subprocess.check_output(\n shlex.split('git remote add origin ..'), cwd=repo_dir\n )\n\n return repo_dir", "def git_clone(repo_path, path):\n r = envoy.run('git clone {repo} {path}'.format(repo=repo_path, path=path))\n if r.status_code != 0 and r.std_err != '':\n return False\n return True", "def init_remote_repo(repo_dir, url, branch='master'):\n log = LOG.bind(url=url, repo_dir=repo_dir, branch=branch)\n try:\n log.debug(\"Initializing local repo\")\n repo = git.Repo.init(repo_dir)\n remote = repo.create_remote('origin', url)\n remote.fetch()\n remote.refs[branch].checkout()\n return repo\n except Exception as e:\n msg = 'Error fetching remote commits'\n log.exception(msg, exc_info=e)\n raise GitToolException(msg)", "def add(orgname, pat, reponame, branchname):\n g = Github(pat)\n repo = g.get_organization(orgname).get_repo(reponame)\n all_files = []\n contents = repo.get_contents(\"\")\n while contents:\n file_content = contents.pop(0)\n if file_content.type == \"dir\":\n contents.extend(repo.get_contents(file_content.path))\n else:\n file = file_content\n all_files.append(str(file)\n .replace('ContentFile(path=\"', '')\n .replace('\")', ''))\n\n with open('./CODEOWNERS', 'r') as file:\n content = file.read()\n\n # Upload to github\n git_prefix = '.github/'\n git_file = git_prefix + 'CODEOWNERS'\n if git_file in all_files:\n contents = repo.get_contents(git_file)\n repo.update_file(contents.path,\n \"updating CODEOWNERS\",\n content,\n contents.sha,\n branch=branchname)\n 
print(git_file + ' updated for: ' + reponame)\n else:\n repo.create_file(git_file,\n \"adding CODEOWNERS\",\n content,\n branch=branchname)\n print(git_file + ' created for: ' + reponame)", "def commitRepository(self, repository):\n runCommand(\n [\"git\", \"-C\", repository.path, \"add\"] + glob.glob(repository.path + \"/*\")\n )\n runCommand([\"git\", \"-C\", repository.path, \"commit\", \"-m\", \"hop\"])", "def _git_add(repo, path, contents='example!\\n'):\n path.write_text(contents)\n subprocess.run(['git', '-C', repo, 'add', path], check=True)", "def _ensure_remotes(self, repo):\n\n remote_names = [r.name for r in repo.remotes]\n if 'origin' not in remote_names:\n repo.create_remote('origin', REPO_FROM)\n\n if 'destiny' not in remote_names:\n repo.create_remote('destiny', REPO_TO)", "def push():\n local('hg push jvacx')", "def upload_tar_from_git():\n require(\"release\", provided_by=[deploy])\n tree = prompt(\"Please enter a branch or SHA1 to deploy\", default=\"master\")\n local(\"git archive --format=tar %s | gzip > %s.tar.gz\" % (tree, env['release']))\n sudo(\"mkdir %(path)s/releases/%(release)s\" % env)\n put(\"%(release)s.tar.gz\" % env, \"%(path)s/packages/\" % env, use_sudo=True)\n sudo(\"cd %(path)s/releases/%(release)s && tar zxf ../../packages/%(release)s.tar.gz\" % env)\n local(\"rm %(release)s.tar.gz\" % env)", "def flush_repo():\n server = get_server()\n run(\"rm -rf %(project_name)s\" % env)\n git.clone()\n server.setup()", "def test_add_repo(self):\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.NO_DIR):\r\n git_import.add_repo(self.TEST_REPO, None, None)\r\n\r\n os.mkdir(self.GIT_REPO_DIR)\r\n self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)\r\n\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.URL_BAD):\r\n git_import.add_repo('foo', None, None)\r\n\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):\r\n git_import.add_repo('file:///foobar.git', None, None)\r\n\r\n # Test git repo that exists, but is \"broken\"\r\n bare_repo = os.path.abspath('{0}/{1}'.format(settings.TEST_ROOT, 'bare.git'))\r\n os.mkdir(bare_repo)\r\n self.addCleanup(shutil.rmtree, bare_repo)\r\n subprocess.check_output(['git', '--bare', 'init', ], stderr=subprocess.STDOUT,\r\n cwd=bare_repo)\r\n\r\n with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):\r\n git_import.add_repo('file://{0}'.format(bare_repo), None, None)", "def push(self, remote, branch, *args):\n return self.cmd('push', remote, branch, *args)", "def repo_add(self, name, url, **kwargs):\n\n self.helm_client.repo_add(name, url, **kwargs)", "def git_clone_repo(remote_url, target_dir):\n log.info('Cloning [{}] into [{}]'.format(remote_url, target_dir))\n git.Repo.clone_from(remote_url, target_dir)", "def clone_repo(parent_repo, new_repo, org, dir=None):\n if dir:\n os.chdir(dir)\n run(\n f\"git clone --origin {parent_repo} [email protected]:{org}/{parent_repo}.git {new_repo}\",\n \"Clone an existing remote repository to the new name locally.\",\n )\n os.chdir(new_repo)\n run(\n f\"git remote set-url --push {parent_repo} no_push\",\n \"Disable pushing to the upstream (parent) repository.\",\n )\n run(\n f\"git remote add origin [email protected]:{org}/{new_repo}.git\",\n \"Add a new remote origin for the this repository.\",\n )\n run(\"git tag -d $(git tag -l)\", f\"Delete all local git tags from {parent_repo}\")\n run(\n rf\"find . \\( ! 
-regex '.*/\\.git/.*' \\) -type f -exec \"\n rf\"perl -pi -e s/{parent_repo}/{new_repo}/g {{}} \\;\",\n \"Search and replace repository name in source files.\",\n )\n lineage = {\n \"version\": LINEAGE_CONFIG_VERSION,\n \"lineage\": {\n \"skeleton\": {\"remote-url\": f\"https://github.com/{org}/{parent_repo}.git\"}\n },\n }\n with LINEAGE_CONFIG.open(\"w\") as f:\n yaml.dump(lineage, stream=f, explicit_start=True)\n run(\"git add --verbose .\", \"Stage modified files.\")\n run(\n 'git commit --message \"Rename repository references after clone.\"',\n \"Commit staged files to the new repository.\",\n )\n print(\"―\" * 80)\n print(\n f\"\"\"\nThe repository \"{parent_repo}\" has been cloned and renamed to \"{new_repo}\".\nUse the following commands to push the new repository to github:\n cd {os.path.join(dir, new_repo) if dir else new_repo}\n git push --set-upstream origin develop\n \"\"\"\n )", "def add_distdir ( self, distdir, src_uri=None, name=None ):\n self.repos.append ( BasicRepo (\n name=os.path.basename ( distdir ) if name is None else name,\n directory=distdir,\n src_uri=src_uri\n ) )", "def create_update_gitdir():\n if not os.path.exists(gitdname):\n retcode = subprocess.call('git clone '+repo, shell=True)\n if retcode != 0:\n msg = \"\"\"There was a problem cloning the repo\"\"\"\n raise Exception(msg)\n else: # directory exists, can't pull if you're not on a branch\n # just delete it and clone again. Lazy but clean solution.\n shutil.rmtree(gitdname)\n create_update_gitdir()", "def git_repo(tmp_path: Path) -> git.Repo:\n repo_dir = tmp_path.joinpath(\"repo\")\n shutil.copytree(\n src=f\"{DATA_DIR}/patches/previous/\",\n dst=str(repo_dir),\n )\n repo = git.Repo.init(repo_dir)\n repo.git.add(repo.working_tree_dir)\n repo.git.commit(\"-mInitial patches\")\n shutil.copytree(\n src=f\"{DATA_DIR}/patches/regenerated/\",\n dst=repo.working_tree_dir,\n dirs_exist_ok=True,\n )\n return repo", "def fetch(path):\n LOGGER.info('Post push request received, Updating %s', path)\n call(['cd \"' + path + '\" && git fetch'], shell=True)", "def push(args):\n if args.type == 'ssh':\n cache = set(args.remote_cache).union(set(args.cache))\n for path in sorted(cache):\n if os.path.exists(os.path.join(args.base, path)) and not remote_exists(args.sftp, os.path.join(args.remote_base, path)):\n print('push: {}'.format(path))\n ensure_remote(args.sftp, os.path.dirname(os.path.join(args.remote_base, path)))\n args.sftp.put(\n os.path.join(args.base, path),\n os.path.join(args.remote_base, path)\n )\n args.remote_cache.append(path)\n args.remote_update = True\n elif args.type == 's3':\n raise NotImplementedError('s3:// remote type not yet supported!')\n elif args.type == 'gs':\n raise NotImplementedError('gs:// remote type not yet supported!')\n return", "def add(name, url):\n click.echo(\"registered repo {} at url {}\".format(name, url))", "def d_ploy():\n\tlocal(\"git push origin --all\")\n\twith cd(LIVE_ROOT):\n\t\trun(\"git pull\")", "def remote_set(location, repo, remote='origin'):\n ensure_dir(location)\n with utils.cd(location):\n if remote_exists(location, remote):\n cmd = '/usr/bin/git remote rm {}'.format(remote)\n subprocess.check_call(cmd, shell=True)\n\n cmd = '/usr/bin/git remote add {} {}'.format(remote, repo)\n subprocess.check_call(cmd, shell=True)", "def __add_repo(repo_name, url):\n\n conn = sqlite3.connect(DTF_DB)\n cur = conn.cursor()\n\n entry = [(repo_name, url)]\n\n sql = ('INSERT INTO repos (repo_name, url)'\n 'VALUES (?, ?)')\n\n cur.executemany(sql, entry)\n conn.commit()\n\n 
return 0", "def clone_repo():\n with settings(warn_only=True):\n run('git clone %(repository_url)s %(repo_path)s' % env)", "def deploy_pull_master(self, restart=True):\n self.ops.local(\"cd \"+self.local_path+\"/src && git reset --hard HEAD && git pull origin master && git submodule update\")\n PiService.deploy(self, restart)", "def fetch_repo(root, repo, url, destination_temp):\n\n print \"Fetching %s from %s\" % (repo, url)\n\n if root.exists('repos/%s' % repo):\n print \"Repo %s exists, issuing a git pull...\" % repo\n call('cd repos/%s; git pull' % repo, shell=True)\n else:\n print \"Repo %s does not exist, issuing a git clone...\" % repo\n\n # explicitely create dir as implicit creation fails on server\n root.makedir('%s/%s' % (destination_temp, repo))\n call('cd repos; git clone %s %s' % (url, repo), shell=True)\n # call('git clone %s %s/%s > /dev/null 2>&1' % (repo['url'], source, repo['id']), shell=True)", "def update_code_from_git():\n if not files.exists(CODE_DIR):\n with cd(HOME_DIR):\n run(\"git clone %s\" % MAIN_GITHUB_REP )\n\n with cd(CODE_DIR):\n git_pull()", "def setup_repo(repo_url, repo_path, repo_push_url=None):\n with setup_repo_context(repo_url, repo_path, repo_push_url):\n pass", "def push():\n files = []\n for i in sp.check_output([\"git\", \"status\"]).decode().split(\"\\n\"):\n nf = \"#\\tnew file:\"\n mf = \"#\\tmodified:\"\n\t# Should have a deleted-files option here too.\n if i[: len(nf)] == nf or i[: len(mf)] == mf:\n f = i.split(\" \")[-1]\n files.append(f)\n files = list(set(files)) # Remove duplicates\n\n print(\"Committing these files: {}\".format(files))\n\n # Run all py scripts through black for formatting.\n# for f in files:\n# if f[-3:] == \".py\":\n# sp.call([\"black\", f])\n\n [sp.call([\"git\", \"add\", \"{}\".format(i)]) for i in files]\n\n commit_message = str(input(\"Enter commit message:\\n\"))\n commit_message = \"Updated\" if commit_message == \"\" else commit_message\n print(\"Committing with commit message of: {}\\n\\n\".format(commit_message))\n sp.call([\"git\", \"commit\", \"-m\", \"{}\".format(commit_message)])\n sp.call([\"git\", \"push\"])", "def push_latest_branch (product, which, main_branch):\n\n name = \"Latest_ACE7TAO3_\" + which\n\n if opts.push:\n vprint (\"Pushing branch\", name)\n ex (\"cd $DOC_ROOT/\" + product + \" && git push origin refs/heads/\" + name,\n allow_fail=True)", "def try_push_special_refs(repo):\n # test pushing to the 'private' dev/arcyd/ area, where arcyd will store\n # it's tracker branches\n repo('push', 'origin', '--dry-run', 'HEAD:refs/heads/dev/arcyd/test')\n\n # test pushing to the refs/arcyd area, where the 'landed' and 'abandoned'\n # archive branches will live\n repo('push', 'origin', '--dry-run', 'HEAD:refs/arcyd/test')", "def push_to_remotes(self, repo: git.Repo, tag: str) -> None:\n if self._upstream_remotes:\n self._logger.info('Start pushing to remotes: %s.',\n self._upstream_remotes)\n else:\n self._logger.info('No push remote was specified')\n return\n for remote_name in self._upstream_remotes:\n remote = self.get_remote(repo, remote_name)\n if remote:\n self._logger.info('Push %s to %s', tag, remote)\n remote.push(str(tag))\n else:\n self._logger.error(\n 'Can\\'t find remote with name `%s`', remote_name)", "def checkout(self):\n if self.repo_path and not self.git_repo:\n self.git_repo = git.Repo(self.repo_path)\n if self.git_repo:\n if self.validate and not self._valid_repo():\n raise ValueError(f'{self.org}/{self.repo} repository mismatch')\n return\n if 
os.path.isdir(os.path.join(self.local_path, '.git')):\n try:\n self.git_repo = git.Repo(self.local_path)\n self.git_repo.remote().fetch()\n self.git_repo.remote().pull()\n return\n except git.exc.InvalidGitRepositoryError:\n shutil.rmtree(self.local_path)\n token = None\n if 'github.com' in self.hostname:\n token = self.creds['github'].token\n elif 'github' in self.hostname:\n token = self.creds['github_enterprise'].token\n elif 'bitbucket' in self.hostname:\n token = self.creds['bitbucket'].token\n elif 'gitlab' in self.hostname:\n token = self.creds['gitlab'].token\n url_path = f'{self.hostname}/{self.org}/{self.repo}.git'\n try:\n self.git_repo = git.Repo.clone_from(\n f'{self.scheme}://{token}@{url_path}',\n self.local_path,\n branch=self.branch\n )\n except git.exc.GitCommandError as e:\n raise git.exc.GitCommandError(\n [c.replace(token, f'{\"\":*<10}') for c in e.command],\n e.status,\n e.stderr.strip('\\n')\n ) from None", "def init_git_repo(c, repo_name, org_name='kinecosystem', remote='origin', branch='master'):\n # clone git repo if it doesn't exist,\n # otherwise checkout master branch\n dir_name = '{}-git'.format(repo_name)\n git_url = 'https://github.com/{}/{}.git'.format(org_name, repo_name)\n\n if not os.path.isdir('{}/{}/volumes/{}'.format(os.getcwd(), c.cwd, dir_name)):\n print('%s git repository doesn\\'t exist, cloning' % repo_name)\n c.run('git clone --branch {branch} {git_url} volumes/{dir_name}'.format(branch=branch, git_url=git_url, dir_name=dir_name))\n else:\n with c.cd('volumes/{}'.format(dir_name)):\n if is_git_dir_modified(c):\n raise Exit('Stopping, please clean changes and retry')\n\n git_dir_checkout_branch(c, org_name, repo_name, remote, branch)\n\n return dir_name", "def create_clowder_repo(self, url, branch, depth=0):\n\n if self.existing_git_repository(self.repo_path):\n return\n self._init_repo()\n self._create_remote(self.remote, url, remove_dir=True)\n self._checkout_new_repo_branch(branch, depth)", "def register_git_repository(args, namespace, notifier=None):\n\n tempdir = tempfile.mkdtemp()\n subprocess.check_call(\n \"\"\"\n cd {} &&\n git clone {} user_code\n \"\"\".format(tempdir, args.git_repository), shell=True)\n return register(Service, args, namespace,\n os.path.join(tempdir, 'user_code'), notifier)", "def create_bare_repo(self, domain):\n\n domain_dir = self.get_domaindir(domain)\n www_dir = domain_dir + \"/www\"\n www_git = domain_dir + \"/www.git\"\n hook_post_receive_file = www_git + \"/hooks/post-receive\"\n\n if not os.path.exists(www_git):\n os.makedirs(www_git)\n git_init_command = \"cd \" + www_git\n git_init_command += \" && git init --bare\"\n subprocess.call(git_init_command, shell=True)\n\n if not os.path.isfile(hook_post_receive_file):\n with open(hook_post_receive_file, \"w\") as file:\n post_receive_content = \"#!/bin/sh\"\n post_receive_content += \"\\nGIT_WORK_TREE=\" + www_dir\n post_receive_content += \" git checkout -f\"\n file.write(post_receive_content)\n subprocess.call(\"chmod +x \" + hook_post_receive_file, shell=True)", "def _git(self, dep):\n git = sh.Command(\"git\")\n for repo in self.dependency_dict[dep][\"git_list\"]:\n logger.debug(f\" cloning {dep} repo {repo}\")\n git.clone(repo, **self.output_kwargs)", "def push_commits(self, verbose=True):\n # The subprocess will return a non-zero exit code even if it succeeded.\n # Check its output to determine whether it worked.\n push_proc = subprocess.run(\n [\"git\", \"push\"],\n cwd=self.path,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n 
universal_newlines=True,\n )\n if \"updated in conservator\" not in push_proc.stdout:\n if \"Everything up-to-date\" in push_proc.stdout:\n logger.warning(push_proc.stdout)\n else:\n logger.error(\n \"Server did not accept changes to index.json:\\n%s\", push_proc.stdout\n )\n raise RuntimeError(\"Failed to push changes to index.json\")\n self.pull(verbose)", "def upload(ui, repo, name, **opts):\n\trepo.ui.quiet = True\n\tcl, err = LoadCL(ui, repo, name, web=True)\n\tif err != \"\":\n\t\treturn err\n\tif not cl.local:\n\t\treturn \"cannot upload non-local change\"\n\tcl.Upload(ui, repo)\n\tprint \"%s%s\\n\" % (server_url_base, cl.name)\n\treturn", "def addRepository(self, name, url):\n sslVerify = \"yes\" if url.startswith(\"https\") else \"no\"\n self.manager.addKickstartRepository(self.currentProject, baseurl=url,\n name=name,\n ssl_verify=sslVerify)\n self.manager.saveKickstartFile(self.currentProject)\n self.refresh()", "def _add_repo(repo_name, repo_url, index):\n\n package_manager = _get_package_manager()\n package_manager.add_repo(repo_name, repo_url, index)\n\n return 0", "def update(filepath, github_account):\n repo = _git.clone_from_github(\n _repo_path(), join(filepath, _repo_name()), github_account=github_account)\n if _update_disco(repo, github_account) > 0:\n repo.push()", "def git_server():\n log('Instalando git', yellow)\n sudo('apt-get -y install git')", "def _do_push(self, line: str) -> None:\n remote_head = None\n while True:\n src, dst = line.split(\" \")[1].split(\":\")\n if src == \"\":\n self._delete(dst)\n else:\n self._push(src, dst)\n if self._first_push:\n if not remote_head or src == git.symbolic_ref(\"HEAD\"):\n remote_head = dst\n line = readline()\n if line == \"\":\n if self._first_push:\n self._first_push = False\n if remote_head:\n if not self.write_symbolic_ref(\"HEAD\", remote_head):\n self._trace(\"failed to set default branch on remote\", Level.INFO)\n else:\n self._trace(\"first push but no branch to set remote HEAD\")\n break\n _write()", "def pull(self, repo, remote_name='github,gitee', branch='master'):\r\n repo.remotes.set_url('gitee', self.UrlGitee)\r\n repo.remotes.set_url('github', self.UrlGithub)\r\n for remote in repo.remotes:\r\n if remote.name in remote_name:\r\n AppLog.info('update from: {}'.format(remote.name))\r\n remote.fetch()\r\n remote_master_id = repo.lookup_reference(\r\n 'refs/remotes/origin/%s' % (branch)).target\r\n merge_result, _ = repo.merge_analysis(remote_master_id)\r\n # Up to date, do nothing\r\n if merge_result & pygit2.GIT_MERGE_ANALYSIS_UP_TO_DATE:\r\n return\r\n # We can just fastforward\r\n elif merge_result & pygit2.GIT_MERGE_ANALYSIS_FASTFORWARD:\r\n repo.checkout_tree(repo.get(remote_master_id))\r\n try:\r\n master_ref = repo.lookup_reference('refs/heads/%s' %\r\n (branch))\r\n master_ref.set_target(remote_master_id)\r\n except KeyError:\r\n repo.create_branch(branch, repo.get(remote_master_id))\r\n repo.head.set_target(remote_master_id)\r\n return\r\n elif merge_result & pygit2.GIT_MERGE_ANALYSIS_NORMAL:\r\n repo.merge(remote_master_id)\r\n\r\n if repo.index.conflicts is not None:\r\n for conflict in repo.index.conflicts:\r\n for c in conflict:\r\n if not c:\r\n continue\r\n AppLog.error('Conflicts found in: %s', c.path)\r\n raise AssertionError('Conflicts, ahhhhh!!')\r\n\r\n user = repo.default_signature\r\n tree = repo.index.write_tree()\r\n repo.create_commit('HEAD', user, user, 'Merge!', tree,\r\n [repo.head.target, remote_master_id])\r\n # We need to do this or git CLI will think we are still\r\n # 
merging.\r\n repo.state_cleanup()\r\n return\r\n else:\r\n raise AssertionError('Unknown merge analysis result')", "def push(self, remote, branch, curr_fb_path):\n env = os.environ.copy()\n env[\"GIT_TERMINAL_PROMPT\"] = \"0\"\n p = subprocess.Popen(\n [\"git\", \"push\", remote, branch],\n stdout=PIPE,\n stderr=PIPE,\n cwd=os.path.join(self.root_dir, curr_fb_path),\n env=env,\n )\n _, error = p.communicate()\n\n response = {\"code\": p.returncode}\n\n if p.returncode != 0:\n response[\"message\"] = error.decode(\"utf-8\").strip()\n\n return response", "def branch(self, name, ref=\"HEAD\"):\n self._git.create_head(name, ref)\n self.checkout(name)", "def add_fetch(self, name, refspec):\n\n err = C.git_remote_add_fetch(self._repo._repo, to_bytes(name), to_bytes(refspec))\n check_error(err)", "def add(self, filename, top_repo_path):\n my_output = subprocess.check_output([\"git\", \"add\", filename], cwd=top_repo_path)\n return my_output", "def deploy(branch=None, to='master', keep=False, heroku_app=HEROKU_APP):\n if branch is None:\n proc = subprocess.run(['git', 'branch'], stdout=subprocess.PIPE)\n lines = [\n line[2:]\n for line in proc.stdout.decode('utf8').splitlines()\n if line.startswith('* ')\n ]\n branch = lines[0]\n\n assert branch != to\n\n subprocess.run(['git', 'checkout', to])\n subprocess.run(['git', 'merge', branch])\n if not keep:\n subprocess.run(['git', 'branch', '--delete', branch])\n subprocess.run(['git', 'push'])\n\n migrate(heroku_app)", "def setorigin(self):\n try:\n origin = self.repo.remotes.origin\n if origin.url != self.origin_url:\n log.debug('[%s] Changing origin url. Old: %s New: %s',\n self.name, origin.url, self.origin_url)\n origin.config_writer.set('url', self.origin_url)\n except AttributeError:\n origin = self.repo.create_remote('origin', self.origin_url)\n log.debug('[%s] Created remote \"origin\" with URL: %s',\n self.name, origin.url)", "def sync_git(store, path):\n storedir, _ = os.path.split(path)\n # get or update the storage\n if os.path.isdir(storedir):\n cmd = [\"git\", \"pull\"]\n cwd = storedir\n else:\n cmd = [\"git\", \"clone\", store[\"url\"], storedir]\n cwd = None\n subprocess.check_call(cmd, cwd=cwd)", "def create_remote_gitlab_repo(repository: Repository, username: str, visibility: str,\n access_token: Optional[str] = None) -> None:\n\n default_remote = repository.client_config.config['git']['default_remote']\n admin_service = None\n for remote in repository.client_config.config['git']['remotes']:\n if default_remote == remote:\n admin_service = repository.client_config.config['git']['remotes'][remote]['admin_service']\n break\n\n if not admin_service:\n raise ValueError('admin_service could not be found')\n\n try:\n # Add collaborator to remote service\n mgr = GitLabManager(default_remote, admin_service,\n access_token=access_token or 'invalid')\n mgr.configure_git_credentials(default_remote, username)\n mgr.create_labbook(namespace=InventoryManager().query_owner(repository),\n labbook_name=repository.name,\n visibility=visibility)\n repository.add_remote(\"origin\", f\"https://{default_remote}/{username}/{repository.name}.git\")\n except Exception as e:\n raise GitLabRemoteError(e)", "def git_clone(git_url=QMK_GIT_URL, git_branch=QMK_GIT_BRANCH):\n repo = repo_name(git_url)\n zipfile_name = repo + '.zip'\n command = ['git', 'clone', '--single-branch', '-b', git_branch, git_url, repo]\n\n try:\n check_output(command, stderr=STDOUT, universal_newlines=True)\n os.chdir(repo)\n hash = check_output(['git', 'rev-parse', 'HEAD'])\n 
open('version.txt', 'w').write(hash.decode('cp437') + '\\n')\n repo_cloned = True\n\n except CalledProcessError as build_error:\n repo_cloned = False\n logging.error(\"Could not clone %s: %s (returncode: %s)\" % (repo, build_error.output, build_error.returncode))\n logging.exception(build_error)\n\n os.chdir('..')\n\n if repo_cloned:\n store_source(zipfile_name, repo, 'cache')\n\n return True", "def cmd_create(self):\n self.repo.create()\n\n # Add .gitignore.\n self.repo.add_files({'.gitignore': '.swp\\n'}, FIRST_COMMIT_MSG)\n\n # Create the etc and timestamps branches.\n self.repo.checkout('etc', create=True)\n self.repo.checkout('timestamps', create=True)\n\n self.repo.checkout('master')\n self.repo.init()\n self.update_repository()\n print('Git repository created at %s' % self.repodir)", "def local_remote_repository(svc_client, tmp_path, mock_redis, identity_headers, real_sync):\n from click.testing import CliRunner\n from git.config import GitConfigParser, get_config_path\n from marshmallow import pre_load\n\n from renku.cli import cli\n from renku.core.utils.contexts import chdir\n from renku.service.config import PROJECT_CLONE_NO_DEPTH\n from renku.service.serializers import cache\n\n # NOTE: prevent service from adding an auth token as it doesn't work with local repos\n def _no_auth_format(self, data, **kwargs):\n return data[\"git_url\"]\n\n orig_format_url = cache.ProjectCloneContext.format_url\n cache.ProjectCloneContext.format_url = _no_auth_format\n\n # NOTE: mock owner/project so service is happy\n def _mock_owner(self, data, **kwargs):\n data[\"owner\"] = \"dummy\"\n\n data[\"name\"] = \"project\"\n data[\"slug\"] = \"project\"\n\n return data\n\n orig_set_owner = cache.ProjectCloneContext.set_owner_name\n cache.ProjectCloneContext.set_owner_name = pre_load(_mock_owner)\n\n remote_repo_path = tmp_path / \"remote_repo\"\n remote_repo_path.mkdir()\n\n remote_repo = Repo.init(remote_repo_path, bare=True)\n remote_repo_checkout_path = tmp_path / \"remote_repo_checkout\"\n remote_repo_checkout_path.mkdir()\n\n remote_repo_checkout = remote_repo.clone(str(remote_repo_checkout_path))\n\n home = tmp_path / \"user_home\"\n home.mkdir()\n\n with modified_environ(HOME=str(home), XDG_CONFIG_HOME=str(home)):\n try:\n with GitConfigParser(get_config_path(\"global\"), read_only=False) as global_config:\n global_config.set_value(\"user\", \"name\", \"Renku @ SDSC\")\n global_config.set_value(\"user\", \"email\", \"[email protected]\")\n\n # NOTE: init \"remote\" repo\n runner = CliRunner()\n with chdir(remote_repo_checkout_path):\n\n result = runner.invoke(\n cli, [\"init\", \".\", \"--template-id\", \"python-minimal\", \"--force\"], \"\\n\", catch_exceptions=False\n )\n assert 0 == result.exit_code, format_result_exception(result)\n\n remote_name = remote_repo_checkout.active_branch.tracking_branch().remote_name\n remote = remote_repo_checkout.remotes[remote_name]\n result = remote.push()\n finally:\n try:\n shutil.rmtree(home)\n except OSError: # noqa: B014\n pass\n\n payload = {\"git_url\": f\"file://{remote_repo_path}\", \"depth\": PROJECT_CLONE_NO_DEPTH}\n response = svc_client.post(\"/cache.project_clone\", data=json.dumps(payload), headers=identity_headers)\n\n assert response\n assert {\"result\"} == set(response.json.keys()), response.json\n\n project_id = response.json[\"result\"][\"project_id\"]\n assert isinstance(uuid.UUID(project_id), uuid.UUID)\n\n try:\n yield svc_client, identity_headers, project_id, remote_repo, remote_repo_checkout\n finally:\n 
cache.ProjectCloneContext.format_url = orig_format_url\n cache.ProjectCloneContext.set_owner_name = orig_set_owner\n\n try:\n shutil.rmtree(remote_repo_path)\n except OSError: # noqa: B014\n pass\n\n try:\n shutil.rmtree(remote_repo_checkout_path)\n except OSError: # noqa: B014\n pass", "def _push_to_server(self) -> None:\n pass", "def clone(ctx, path_base, repo_url, dir_target):\n if 'github' in repo_url:\n # Just to make sure ssh agent forwarding works well.\n ctx.run('ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts', warn=True)\n ctx.run('ssh -T [email protected]', warn=True)\n\n with ctx.cd(path_base):\n ctx.run(f'git clone -v {repo_url} {dir_target}')", "def cmd_add(self, args):\n log.info(\"adding '{}' to the repository...\".format(args.file))\n self.check_repo()\n # TODO: implement encryption\n if args.encrypted:\n raise NotImplementedError('encryption is not implemented yet')\n # check if file exists\n if not os.path.exists(args.file):\n log.error('file not found: {}'.format(args.file))\n if os.path.islink(args.file):\n if os.path.realpath(args.file).startswith(self.files_path):\n log.error('file is already in the repository: {}'.format(args.file))\n else:\n log.error('can not add link file: {}'.format(args.file))\n # check if file is in a subfolder of the home directory\n if not args.file.startswith(self.homedir):\n log.error('file is not in a subfolder of {}'.format(self.homedir))\n if args.file.startswith(self.path):\n log.error(\"files inside the repository can't be added\")\n # generate paths\n repo_relpath = args.file.replace(self.homedir, '')[1:]\n filename = os.path.split(args.file)[1]\n repo_subdirs = os.path.split(repo_relpath)[0].split(os.path.sep)\n repo_dir = os.path.join(self.files_path, *repo_subdirs)\n repo_file = os.path.join(repo_dir, filename)\n # move file into the repository and create symlink\n if not os.path.exists(repo_dir):\n log.debug('creating folder: {}'.format(repo_dir))\n os.makedirs(repo_dir)\n log.debug('moving {} to {}'.format(args.file, repo_file))\n shutil.move(args.file, repo_file)\n log.debug('creating symlink')\n os.symlink(repo_file, args.file)\n # add new file to Git\n log.debug('adding new file to Git')\n self.git_commit('add {}'.format(args.file))\n log.info('done')" ]
[ "0.74367875", "0.7217327", "0.7001284", "0.68715334", "0.6737078", "0.6531534", "0.65219814", "0.6468138", "0.6464436", "0.6363024", "0.632618", "0.62762666", "0.6264901", "0.62638634", "0.6229483", "0.62241775", "0.61916083", "0.6184975", "0.6179584", "0.61400473", "0.6078943", "0.6049489", "0.6038741", "0.60308397", "0.6022022", "0.59970856", "0.5950285", "0.5944758", "0.59374416", "0.5933768", "0.59279114", "0.59105784", "0.5903896", "0.5848664", "0.58484733", "0.58189195", "0.5811869", "0.58061004", "0.57883483", "0.57691723", "0.5763072", "0.5741437", "0.57069683", "0.56989175", "0.5697954", "0.5684583", "0.5682906", "0.56823397", "0.5676803", "0.5670262", "0.5664246", "0.56633854", "0.5663177", "0.56490916", "0.56329", "0.56268907", "0.56256783", "0.5608928", "0.5607725", "0.5598706", "0.55856", "0.55708694", "0.55680066", "0.55649775", "0.55481946", "0.554672", "0.5539576", "0.5533157", "0.552791", "0.5516409", "0.5503676", "0.5499952", "0.54892737", "0.54885375", "0.5485005", "0.54809946", "0.54809755", "0.5477843", "0.54713607", "0.54684794", "0.5465127", "0.5462509", "0.5454189", "0.5451012", "0.54451334", "0.5438055", "0.54306704", "0.5422655", "0.54154426", "0.54134536", "0.54107314", "0.54089135", "0.54023737", "0.539859", "0.53876936", "0.5380926", "0.5369166", "0.53659385", "0.5362297", "0.53570634" ]
0.81499577
0
Updates the apache httpd.conf file to point to the new project instead of the default 'myproject'. This is called as part of clone_into_project, or you can call it directly.
def update_conf_file():
    filepath = remote_dir + "/apache2/conf/httpd.conf"
    fabric.contrib.files.sed(filepath, 'myproject', project_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_project():\n _require_environment()\n\n # Grants write rights on log dir for the admin group\n log_dir = '%s/log' % _interpolate(VIRTUALENV_DIR)\n if files.exists(log_dir):\n sudo('chmod -R g+w %s' % log_dir)\n\n # Updates from git, issues Django syncdb, South migrate, Collecstatic and resets Apache\n branch = env.project.get('git_branch', 'master')\n with prefix(_django_prefix()):\n with cd(_django_project_dir()):\n with settings(hide('warnings'), warn_only=True):\n run('git fetch origin %s:%s' % (branch, branch))\n run('git checkout %s' % branch)\n with settings(hide('warnings'), warn_only=True):\n run('git pull origin %s' % branch)\n run('django-admin.py syncdb --noinput')\n run('django-admin.py migrate')\n run('touch config/wsgi*')\n run('django-admin.py collectstatic --noinput')", "def update_webserver_config():\n require('ws_config_path', provided_by=[prod])\n apache_sa = '/etc/apache2/sites-available/'\n apache_se = '/etc/apache2/sites-enabled/'\n nginx_sa = '/etc/nginx/sites-available/'\n nginx_se = '/etc/nginx/sites-enabled/'\n\n sudo('rm %s%s' % (apache_sa, env.project_name))\n sudo('rm %s%s' % (apache_se, env.project_name))\n\n sudo('rm %s%s' % (nginx_sa, env.project_name))\n sudo('rm %s%s' % (nginx_se, env.project_name))\n\n put('%sapache2/sites-available/*' % (env.ws_config_path), apache_sa, use_sudo=True)\n put('%snginx/sites-available/*' % (env.ws_config_path), nginx_sa, use_sudo=True)\n\n sudo('ln -s %s%s %s' % (apache_sa, env.project_name, apache_se))\n sudo('ln -s %s%s %s' % (nginx_sa, env.project_name, nginx_se))\n restart_webservers()", "def touch_project():\n remote('touch config/wsgi*')", "def config_apache():\n with lcd(env.projectroot):\n with cd(\"/etc/apache2\"):\n put(\"manage/sysconf/%(target)s/etc/apache2/sites-available/lagrummet\" % env, \"sites-available\",\n use_sudo=True)\n try:\n sudo(\"ln -s ../sites-available/lagrummet sites-enabled/lagrummet\")\n except:\n print \"Ignored failed to create symbolic link!\"", "def configure_project():\n pass", "def install_apache_conf():\n sudo('cp -T %(repo_path)s/apache/%(settings)s/apache %(apache_config_path)s' % env)", "def configure_httpd_wsgi_conf(self):\n raise NotImplementedError()", "def clone_into_project(git_repo_name):\n repo_dir = git_dir + \"/%s.git\" % git_repo_name\n with cd(remote_dir):\n run('rm -rf myproject')\n run(\"git clone %s %s\" % (repo_dir, project_name))\n run(\"echo 'MY_ENV=\\\"prod\\\"' > %s/%s/site_settings.py\" % (project_name,project_name))\n update_conf_file()", "def config_apache_command(server_name):\n if not server_name:\n server_name = socket.getfqdn()\n print(\"\"\"# Virtual Host config for BetterWeather WSGI Server\n# Required modules: mod_wsgi\n<VirtualHost *:80>\n ServerName \"\"\", end='')\n print(server_name, end='')\n print(\"\"\"\n WSGIDaemonProcess betterweather threads=15\n WSGIScriptAlias / \"\"\", end='')\n print(app.root_path + '/wsgi.py', end='')\n print(\"\"\"\n <Directory \"\"\", end='')\n print(os.path.dirname(os.path.dirname(os.path.abspath(__file__))).__str__() + '>', end='')\n print(\"\"\"\n WSGIProcessGroup betterweather\n WSGIApplicationGroup %{GLOBAL}\n \n <IfVersion < 2.4>\n Allow from all\n Order allow,deny\n </IfVersion>\n \n <IfVersion >= 2.4>\n Require all granted\n </IfVersion>\n \n <IfModule mod_headers.c>\n Header set Cache-Control \"no-cache, no-store, must-revalidate\"\n Header set Pragma \"no-cache\"\n Header set Expires 0\n </IfModule>\n </Directory>\n</VirtualHost>\"\"\")", "def deploy_project(self, svn_repo, co_dir, path_to_static, 
database_name, apache_conf_file_path, project_port, svn_username='', svn_password='', db_username='', db_password='', sql_paths_list=[], changes_dict={}):\n self.checkout(svn_repo, co_dir, svn_username, svn_password)\n self.change_static_to_pro(path_to_static)\n self.create_db(database_name, sql_paths_list, db_username, db_password)\n self.change_settings(co_dir, changes_dict)\n self.create_vhost(apache_conf_file_path, project_dir=co_dir, project_port=project_port)\n self.server_restart()", "def update_website_configuration():\n put('config/supervisor_website.conf', \n '/etc/supervisor/conf.d/gunicorn.conf', \n use_sudo=True)\n sudo('supervisorctl update')\n sudo('supervisorctl reload')", "def install(self, project, acl=None):\n self.config.options['project_name'] = project.name\n self.config.options['show_right_bar'] = True\n super(ForgeWikiApp, self).install(project, acl=acl)\n\n root_page_name = self.default_root_page_name\n Globals(app_config_id=c.app.config._id, root=root_page_name)\n self.upsert_root(root_page_name)", "def apache():\n\n get_details()\n\n context = {\n \"site_name\": env.site_name,\n \"paths\": env.paths,\n \"project_name\": env.project_name,\n }\n\n apache_path = '/etc/httpd/sites-available/'\n\n if exists(apache_path):\n with cd(apache_path):\n if exists(env.site_name):\n print \"apache site configuration already exists!\"\n return\n else:\n upload_template(\"apache_conf.txt\", \n env.site_name,\n context,\n use_jinja=True,\n template_dir=JINJA_TEMPLATE_PATH,\n use_sudo=True)\n print \"Created apache site configuration file. Don't forget to enable it!\"\n return\n else:\n print \"It doesn't seem like you have apache installed.\"\n return", "def update_project(arn=None, name=None, defaultJobTimeoutMinutes=None):\n pass", "def _change_project(self):\n project_key = utils.prompt_string(\n 'You are currently managing Google Cloud Project {!r}.\\n'\n 'This project is currently saved as {!r}.\\n'\n 'All of the currently configured projects include: {}.\\n'\n 'Which project would you like to switch to?'.format(\n self._config.project, self._config.key,\n ', '.join(common.get_available_configs(self._config.path))))\n return _Manager.new(\n self._config.path, self._prefer_gcs, project_key=project_key,\n version=self._version)", "def edit_files(project_name, app_name):\n SETTINGS = f'{project_name}/backend/backend/settings.py'\n PACKAGE_JSON = f'{project_name}/frontend/package.json'\n\n\n c1 = f\"\\n \\t'corsheaders', \\n\\t'rest_framework', \\n\\t'{app_name}',\\n\"\n add_to_line(SETTINGS, 32, c1 )\n\n c2 = f\"\\n \\t'corsheaders.middleware.CorsMidleware',\\n\"\n add_to_line(SETTINGS, 44, c2 )\n \n with open(SETTINGS, 'a+') as f:\n f.write(\"\\nCORS_ORIGIN_WHITELIST = ['localhost:3000/']\")\n\n c3 = '\\n\\t\"proxy\": \"http://localhost:8000\",\\n'\n add_to_line(PACKAGE_JSON, 3, c3)", "def _prep_client_dist_for_project(project_env, project_root_dir):\n #need to make the project's index.html the index.html that tomcat will find\n clientdir = os.path.join(project_root_dir, 'client')\n index_target_fn = os.path.join(clientdir, 'index.html')\n\n index_src_base = 'index.' 
+ project_env.get_project_name() + '.html'\n index_src_fn = os.path.join(clientdir, index_src_base)\n cmd = 'cp ' + index_src_fn + ' ' + index_target_fn\n cr = container_users.make_host_user_command_runner()\n result = cr.run(cmd)\n return result.get_exit_code()", "def set_config_path(self, new_config_path):\n oldpath = self.get_config_path()\n cdir, cfile = os.path.split(new_config_path)\n \n if not cdir.startswith('/'):\n cdit='/'+cdir\n if not cfile:\n cfile = 'site.yaml'\n\n self.dropbox_base_dir = cdir\n self.dropbox_site_yaml = cfile\n newpath = self.get_config_path()\n if newpath !=oldpath:\n return oldpath", "def update_project():\n with cd(env.code_dir):\n with _virtualenv():\n run('git pull origin master')\n install_requirements()\n perform_migration()\n collect_static()", "def createFakeSphinxProject(self):\n self.sourceDir.child(\"conf.py\").setContent(self.confContent.encode())\n self.sourceDir.child(\"index.rst\").setContent(self.indexContent.encode())", "def create_vhost(self, apache_conf_file_path, project_dir, project_port, queue=None):\n #append to the one n only httpd.conf....use apacheconfparser before to know the settings?\n import os\n #Apache conf file\n if self.SHELL == 'Local':\n if os.path.exists(apache_conf_file_path):\n httpd = apache_conf_file_path\n else:\n raise Exception('Apache Config file httpd.conf not found!')\n elif self.SHELL == 'Remote':\n get(apache_conf_file_path, 'tmp')\n httpd = os.path.join(os.path.dirname(__name__), 'tmp', 'httpd.conf')\n else:\n raise Exception('Shell type was not given properly!')\n\n #parse apache conf info here?\n import ApacheParser as AP\n from ApacheSections import parse_section\n\n #create config object and parse the file\n c = AP.ApacheConfig('httpd')\n confs = c.parse_file(httpd)\n configs = parse_section(confs.children)\n\n ports = [each[1][0] for each in configs if each[0]=='Listen']\n unneeded = [each[0] for each in configs if each[0] in ('WSGIPythonPath', 'WSGIRestrictStdin', 'WSGIRestrictStdout')]\n\n print apache_conf_file_path, project_dir, project_port\n #create the vhost conf\n try:\n project_dir = project_dir.rstrip('/')\n project_dir = project_dir.rstrip('\\\\')\n\n if ('/' in project_dir and '/' != os.path.sep) or ('\\\\' in project_dir and '\\\\' != os.path.sep):\n os.path.sep = os.path.altsep\n\n PATH_TO_PROJECT_ROOT = project_dir\n PORT = project_port\n PATH_TO_PUBLIC_DIR = os.path.sep.join([project_dir, 'public'])\n PATH_TO_STATIC = os.path.sep.join([PATH_TO_PUBLIC_DIR, 'static'])\n PATH_TO_DIVINEBA_WSGI = os.path.sep.join([PATH_TO_PUBLIC_DIR, 'divineba.wsgi'])\n\n #The PrismERP FurinaPy vhost config template\n vhost = os.path.join(os.path.dirname(__name__), 'tmp', 'PyPrismVhost.conf')\n v = open(vhost, 'r')\n temp = os.path.join(os.path.dirname(__name__), 'tmp', 'new.conf')\n n = open(temp, 'w')\n\n for each in v.readlines():\n if '{PATH_TO_PROJECT_ROOT}' in each:\n n.write(each.replace('{PATH_TO_PROJECT_ROOT}', PATH_TO_PROJECT_ROOT))\n elif '{PORT}' in each:\n if PORT in ports and 'Listen' in each:\n n.write('#Port already open...')\n n.write('\\n')\n # n.write(each.replace('{PORT}', PORT))\n else:\n n.write(each.replace('{PORT}', PORT))\n elif '{PATH_TO_STATIC}' in each:\n n.write(each.replace('{PATH_TO_STATIC}', PATH_TO_STATIC))\n elif '{PATH_TO_DIVINEBA_WSGI}' in each:\n n.write(each.replace('{PATH_TO_DIVINEBA_WSGI}', PATH_TO_DIVINEBA_WSGI))\n elif '{PATH_TO_PUBLIC_DIR}' in each:\n n.write(each.replace('{PATH_TO_PUBLIC_DIR}', PATH_TO_PUBLIC_DIR))\n else:\n try:\n if each.split()[0] in unneeded:\n 
continue\n else:\n n.write(each)\n except:\n n.write(each)\n\n v.close()\n n.close()\n\n #append at end of the httpd.conf....local = original....remote = delete original and put\n\n conf = open(httpd, 'a')\n n = open(temp, 'r')\n conf.write('\\n\\n')\n for each in n.readlines():\n conf.write(each)\n\n n.close()\n conf.close()\n\n if self.SHELL == 'Remote':\n cmd = 'rm -f ' + apache_conf_file_path\n self.run_task(cmd)\n put(httpd, apache_conf_file_path, mode=0755)\n os.remove(httpd)\n\n os.remove(temp)\n\n except Exception as e:\n print str(e)\n raise Exception(e)\n\n cmd = 'create virtualhost'\n out = 'Config Created Successfully!'\n res = [cmd, out.split('\\n')]\n if queue:\n queue.put(res)", "def configure_httpd_service_ipa_conf(self):\n raise NotImplementedError()", "def generate_wsgi_conf():\n _require_environment()\n\n # Dictionary for interpolating template\n variables = {\n 'project': env.project['project'],\n 'settings': env.project['settings'],\n 'site_packages': SITE_PACKAGES_DIR % _get_python_version(),\n }\n _generate_conf('wsgi.py', variables)", "def redefine_airflow_workspaces(self, workspaces):\n dst = _app_config_file()\n new_config = (\n pyhocon.ConfigFactory.parse_string(\n \"aiscalator.airflow.setup.workspace_paths = [\\n\" +\n \"\\n\".join([ws for ws in workspaces]) +\n \"]\"\n )\n ).with_fallback(_app_config_file(), resolve=False)\n with open(dst, \"w\") as output:\n output.write(\n pyhocon.converter.HOCONConverter.to_hocon(new_config)\n )\n self._app_conf = new_config\n return new_config", "def update_submodules(options, project_directory=None):\n pass", "def update_project(self, name):\n self._log.info(\"Updating project: {}\".format(name))\n if name in self.projects:\n pass\n else:\n self.add_project(name)", "def test_replace_project(self):\n pass", "def _bootstrap():\r\n import os\r\n import sys\r\n \r\n pwd = os.path.dirname(__file__)\r\n \r\n (parent_directory, project_name) = os.path.split(pwd)\r\n \r\n # protect template itself from being bootstrapped\r\n if project_name == 'django_project_template':\r\n abort('bootstrap should not be run on project template!')\r\n\r\n env.project_name = project_name\r\n env.project_domain = env.project_name.split('.')[0].replace('_','-')\r\n \r\n def replace_in_files(path, find, replace):\r\n \r\n import fileinput\r\n \r\n if os.path.isfile(path):\r\n for line in fileinput.input(path, inplace=1):\r\n if find in line:\r\n line = line.replace(find, replace)\r\n sys.stdout.write(line)\r\n \r\n if os.path.isdir(path):\r\n # do not replace in virtual env\r\n if os.path.split(path)[1] == env.virtualenv_dir:\r\n return\r\n for f in os.listdir(path):\r\n replace_in_files(os.path.join(path, f), find, replace)\r\n\r\n # 'escape' placeholders here to protect them from being replaced\r\n replace_in_files(pwd, '@PROJECT_NAME' + '@', env.project_name)\r\n replace_in_files(pwd, '@PROJECT_DOMAIN' + '@', env.project_domain)", "def sync_config():\n rsync_project(remote_dir='/apps/sharejs-rethinkdb-example/config/', local_dir='./config/')", "def wsgi_conf():\n\n get_details()\n\n site_dir = posixpath.join(env.paths[\"sites\"], env.site_name)\n if not exists(site_dir):\n run(\"mkdir -p %s\" % site_dir)\n\n filename = \"%s_wsgi.py\" % env.project_name\n\n context = {\n \"site_name\": env.site_name,\n \"project_name\": env.project_name,\n \"python_version\": env.python_version,\n \"paths\": env.paths,\n }\n\n # Set up the wsgi dir.\n if env.app_server=='apache':\n wsgi_dir = posixpath.join(site_dir, \"apache\")\n else:\n wsgi_dir = 
posixpath.join(site_dir, \"src/src-%s\" % env.project_name)\n\n with cd(wsgi_dir):\n if not exists(filename):\n print \"Template path: %s\" % JINJA_TEMPLATE_PATH\n upload_template(\"wsgi_conf_%s.txt\" % env.app_server,\n filename,\n context,\n use_jinja=True,\n template_dir=JINJA_TEMPLATE_PATH)\n else:\n\t\t\t#TODO: If it exists, append to it\n print \"This file already exists.\"\n return\n run(\"chmod 654 %s\" % filename)", "def startproject(self):\n\n path = os.path.join(self.path, self.project_name)\n if os.path.exists(path):\n raise exceptions.ProjectDirectoryAlreadyExistsError(self.project_name)\n else:\n os.makedirs(path)\n\n context = {\n 'project_name': self.project_name,\n 'default_region': self.region,\n 'random': hashlib.sha1(six.text_type(random.random()).encode('utf-8')).hexdigest()[:8]\n }\n\n self._clone_defaults(\n os.path.join(self.root, 'defaults', 'project'),\n path,\n context\n )", "def project_refresh(project_name):\n if not db_find_project(project_name):\n abort(404)\n analyser.add_repos(current_user.username, [project_name])\n return redirect(url_for('main.admin_manage'))", "def install_mod_wsgi(venv_directory, project_directory, host):\n print(\"Installiere mod_wsgi und richte Apache ein...\")\n _run(f\"{venv_directory}/bin/pip install -q mod_wsgi\")\n cmd = _run(\n f\"sudo {venv_directory}/bin/mod_wsgi-express install-module\",\n capture_output=True\n )\n with open('mod_wsgi.load', 'w') as f:\n f.write(cmd.stdout.decode())\n _run(\"sudo mv mod_wsgi.load /etc/apache2/mods-available/mod_wsgi.load\")\n _run(\"sudo a2enmod -q mod_wsgi macro\")\n with open('mizdb.conf', 'w') as f:\n f.write(\n site_config.format(\n host=host,\n venv_directory=venv_directory,\n project_directory=project_directory\n )\n )\n _run(\"sudo mv mizdb.conf /etc/apache2/sites-available/mizdb.conf\")\n _run(\"sudo a2ensite -q mizdb\")\n print(\"Apache neustarten...\")\n _run(\"sudo -k service apache2 restart\")", "def upgrade_project(ctx, path):\n with ctx.cd(path):\n ctx.run(\"newt upgrade\")", "def update_site(env='development', update_settings='n', upgrade_apps='n'):\n update_project(env, update_settings)\n update_apps(env, upgrade_apps)", "def updateProjects(request):\n\n updater = ProjectUpdater()\n updater.run()\n return http.HttpResponse(\"Ok\")", "def update_config(self, config):\n p.toolkit.add_template_directory(config, \"templates\")\n p.toolkit.add_public_directory(config, 'public')", "def edit_httpdConf():\n t2 = sp.Popen(\n [\n '/opt/OAM/oracle/product/11.1.1/as_1/webgate/ihs/tools/setup/InstallTools/EditHttpConf -f /opt/WebSphere/HTTPServer/conf/httpd.conf -w /opt/OAM/oracle/Middleware/Oracle_OAMWebGate1 -oh /opt/OAM/oracle/product/11.1.1/as_1/ -ws ihs'\n ],\n shell=True,\n stdout=sp.PIPE,\n stderr=sp.PIPE\n )\n stdout_value, stderr_value = t2.communicate()", "def amend_project_fedora(project_id, version=None):\n container_path = project_id\n if version:\n container_path = '{}v{}'.format(container_path, str(version))\n\n create_fc_version(container_path)\n project_meta = format_metadata_for_fedora(project_id)\n res = fedora_update(container_path, project_meta)\n return res", "def update_config(self, config):\n p.toolkit.add_template_directory(config, 'templates')\n p.toolkit.add_public_directory(config, 'public')", "def update_site():\n site_path = os.path.join(PROJECTS_ROOT, CURRENT_SITE)\n docs_path = os.path.join(site_path, 'doc_src')\n with cd(site_path):\n run('git pull --all')\n run('workon djangopatterns && pip install -r %s/setup/requirements.txt' % site_path)\n run('workon 
djangopatterns && %s/manage.py syncdb' % site_path)\n # run('workon djangopatterns && %s/manage.py migrate' % site_path)\n run('workon djangopatterns && %s/manage.py collectstatic --noinput' % site_path)\n run('workon djangopatterns && %s/manage.py compress' % site_path)\n with cd(docs_path):\n run('git pull --all')\n # run('workon djangopatterns && cd doc_src && make clean')\n # run('workon djangopatterns && cd doc_src && make json')\n reload_site()", "def update():\n require('PROJECT_NAME')\n\n with cd(utils.home('apps', env.PROJECT_NAME)):\n run('hg pull')\n run('hg up')", "def rewrite_url(url, project=None):\n parts = list(urlparse.urlsplit(url))\n new_domain = get_preferred_domain(project=project, default_to_appid=False)\n if new_domain:\n parts[1] = new_domain\n return urlparse.urlunsplit(parts)", "def refresh_wsgi():\n\n require(\"wsgi_path\", \"sudo_user\")\n cmd = \"touch -c %s\" % env.wsgi_path\n sudo(cmd, user=env.sudo_user)", "def put_settings_files(env='development'):\n projects = build_projects_vars()\n project = projects[env]\n if exists('%(dir)s/%(inner_dir)s' % project):\n put(project['settings_path'], '%(dir)s/%(inner_dir)s/local_settings.py' % project)\n if env == 'production':\n with cd('%(dir)s/%(inner_dir)s' % project):\n sed('local_settings.py', '^DEBUG = True$', 'DEBUG = False')", "def modifyconfigs(path_dirs, new_conf):\n for template in new_conf:\n config = ConfigParser.RawConfigParser()\n config.read(path_dirs.template_dir+template)\n for (section, option, value) in new_conf[template]:\n if not config.has_section(section):\n config.add_section(section)\n config.set(section, option, value)\n with open(path_dirs.template_dir+template, 'w') as configfile:\n config.write(configfile)", "def bootstrap():\n nginx_config = path.join(get_paths(NAME_CONFIGS_DIR)[1], 'nginx.conf')\n path_sites = '/etc/nginx/sites-enabled/'\n\n rm(path.join(path_sites, 'default'), use_local=False)\n sudo(get_symlink_command(nginx_config, '%s%s.conf' % (path_sites, PROJECT_NAME)))\n\n reload()", "def _update_site_configuration(self):\n self.site.configuration.site_values = {'THIRD_PARTY_AUTH_ONLY_DOMAIN': self.email_domain_name}\n self.site.configuration.save()", "def update_project_name(self, curr_proj, proj_new_name):\r\n for proj in self.__projects:\r\n if proj == curr_proj: # Find the project with the same current name\r\n proj.update_name(proj_new_name) # Update the project's name\r", "def test_config_set(self):\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('config set project name \"Test project\"')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)\n self.assertEqual('Test project',\n self.env.config.get('project', 'name'))", "def test_patch_project(self):\n pass", "def _create_namespace(self):\n self.ocp.new_project(self.namespace)", "def touch():\n run('touch %s' % PATH_SEP.join((env.code_root, 'mwana', 'malawi', 'apache',\n 'project.wsgi')))", "def update_config(self, config):\n toolkit.add_template_directory(config, 'templates')\n toolkit.add_public_directory(config, 'public')\n toolkit.add_resource('fanstatic', 'syngenta')", "def link_config_files():\n\n require('environment', provided_by=env.environments)\n with settings(warn_only=True):\n sudo('rm /etc/nginx/sites-enabled/default')\n sudo('rm /etc/nginx/sites-enabled/%(project)s-*.conf' % env)\n sudo('rm /etc/supervisor/conf.d/%(project)s-*.conf' % env)\n sudo('ln -s /home/%(deploy_user)s/services/nginx/%(environment)s.conf 
/etc/nginx/sites-enabled/%(project)s-%(environment)s.conf' % env)\n sudo('ln -s /home/%(deploy_user)s/services/supervisor/%(environment)s.conf /etc/supervisor/conf.d/%(project)s-%(environment)s.conf' % env)", "def set_project(project_id):\n return fluent.set_project(project_id)", "def setup_project():\n _require_environment()\n\n # Checks if needed conf files for this environment already exist\n if not os.path.exists(_interpolate('%(settings)s.py')):\n abort(_interpolate('There is no settings.py for %(environment)s - create one, and commit'))\n if not os.path.exists(_interpolate('config/apache_%(environment)s.conf')):\n abort(_interpolate('There is no Apache conf for %(environment)s - use task \"generate_apache_conf\" to generate one, and commit'))\n if not os.path.exists(_interpolate('config/wsgi_%(environment)s.py')):\n abort(_interpolate('There is no WSGI conf for %(environment)s - use task \"generate_wsgi_conf\" to generate one, and commit'))\n\n # Configures virtualenv and clones git repo\n _setup_virtualenv()\n _clone_gitrepo()\n\n # Issues extra commands at project's level, if any\n extra_commands()\n\n # Sets up Apache, MySQL\n _setup_project_apache()\n _drop_database_mysql()\n _setup_project_mysql()\n\n # Finish installation\n pip_install()\n update_project()", "def project():", "def project():", "def project():", "def set_projects(self, name_short, name, disc_path):\n if name not in conf.projects:\n pass # TODO add the project in the conf\n else:\n return \"Project already exist\"\n return self.datas.create_path(disc_path)", "def apache(self):\n self.summarize_operation(\"Installing Apache Web Server\")\n self.install_package(\"apache2\")", "def test_config_get(self):\n test_name = sys._getframe().f_code.co_name\n self.env.config.set('project', 'name', 'Test project')\n rv, output = self._execute('config get project name')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def update(self, oid, name=None, domain=None, enabled=None, \n description=None):\n data = {\"project\": {}}\n \n if name is not None:\n data['project']['name'] = name\n if domain is not None:\n data['project']['domain_id'] = domain\n if enabled is not None:\n data['project']['enabled'] = enabled\n if description is not None:\n data['project']['description'] = description\n \n path = '/projects/%s' % oid\n res = self.client.call(path, 'PATCH', data=json.dumps(data), \n token=self.manager.identity.token)\n self.logger.debug('Update openstack project: %s' % truncate(res))\n return res[0]['project']", "def apply_config(self, responsible, paths, arg=None):\n self.warning(\"Reconfiguring NTP server (called with paths %s)\" % paths)\n return self.updateRunningConf(responsible)", "def set_project(\n name\n):\n if not is_alive():\n err_msg = \"Cannot connect to getML engine. 
Make sure the engine is running and you are logged in.\"\n raise ConnectionRefusedError(err_msg)\n\n cmd = dict()\n cmd[\"type_\"] = \"set_project\"\n cmd[\"name_\"] = name\n\n comm.send(cmd)", "def update(self):\n with settings(user=self.serviceUser):\n self.venv.create()\n\n self.venv.install_twisted()\n self.venv.install(\" \".join(\"\"\"\n psycopg2==2.7.5\n pygments==2.2.0\n spambayes==1.1b3\n trac==1.2.2\n trac-github==2.3\n requests_oauthlib==1.0.0\n svn+https://svn.edgewall.org/repos/trac/plugins/1.2/spam-filter@15310\n git+https://github.com/twisted-infra/twisted-trac-plugins.git\n \"\"\".split()))\n\n # This is txacme v2 but is not yet released.\n # Should be replaced on we have txacme v2.\n # See https://github.com/twisted/txacme/pull/158\n self.venv.install(\n \"--index=https://pypi.chevah.com/simple txacme==1.0.0.chevah4\")\n\n run('mkdir -p ' + self.configDir)\n put(os.path.dirname(__file__) + '/*', self.configDir,\n mirror_local_mode=True)", "def configure(conf): # pylint: disable=too-many-branches,too-many-locals\n conf.env.VERSION = VERSION\n conf.env.APPNAME = APPNAME\n conf.msg(\"Project\", f\"{conf.env.APPNAME}-{conf.env.VERSION}\")\n conf.load(\"python\")\n conf.check_python_version((3, 6))\n conf.undefine(\"PYTHONDIR\")\n conf.undefine(\"PYTHONARCHDIR\")\n\n base_err_msg = (\n \"wscript's VERSION attribute ({}) and version information in file {} \"\n \"({}) do not match.\"\n )\n\n version_file = conf.path.find_node(\"VERSION\")\n version_info = version_file.read_json()\n version_file_ver = version_info[\"native Lua\"]\n if not VERSION == version_file_ver:\n conf.fatal(base_err_msg.format(VERSION, version_file, version_file_ver))\n\n conf.env.lua_src_version = version_info[\"lua\"]\n conf.env.lua_tests_version = version_info[\"tests\"]\n conf.msg(\"native Lua version\", VERSION)\n conf.msg(\"Lua version\", conf.env.lua_src_version)\n conf.msg(\"Lua tests version\", conf.env.lua_tests_version)\n conf.env.generic = conf.options.generic\n conf.msg(\"Platform\", conf.options.generic or PLATFORM)\n conf.load(\"gnu_dirs\")\n\n conf.env.WAF_CONFIG_H_PRELUDE = (\n conf.path.find_node(os.path.join(\"cfg\", \"prelude.h.template\"))\n .read()\n .replace(\"{{ VERSION }}\", VERSION)\n .replace(\"{{ REPO_URL }}\", REPO_URL)\n )\n conf.write_config_header(configfile=\"waf_build_config.h\")\n platform_configs = conf.path.find_node(\n os.path.join(\"cfg\", \"platforms.json\")\n ).read_json()\n is_known = platform_configs[\"known-platforms\"].get(PLATFORM, False)\n if not is_known:\n pass # TODO\n if conf.options.generic:\n pass # TODO\n\n schema_compiler_setup = conf.path.find_node(\n os.path.join(\"cfg\", \"compiler-cfg.schema.json\")\n ).read_json()\n cfgs = conf.path.ant_glob(\n \"cfg/**/*.json\",\n excl=[\"**/*.schema.json\", \"cfg/generic.json\", \"cfg/platforms.json\"],\n )\n Logs.debug(\", \".join(i.relpath() for i in cfgs))\n for i in cfgs:\n valid = validate_json_schema(i.read_json(), schema_compiler_setup)\n if not valid:\n Logs.warn(f\"{i.relpath()} is not a valid compiler setup.\")\n generic_build = conf.path.find_node(os.path.join(\"cfg\", \"generic.json\")).read_json()\n for _, v in generic_build.items():\n validate_json_schema(v, schema_compiler_setup)\n\n conf.load(\"compiler_c\")\n\n # load platform-compiler configuration\n cc_config_file = os.path.join(\n \"cfg\", PLATFORM, f\"{PLATFORM}_{conf.env.CC_NAME}.json\"\n )\n cc_config = conf.path.find_node(cc_config_file).read_json()\n for i, val in cc_config.items():\n if i.isupper() or \"_PATTERN\" in i:\n conf.env[i] = 
val\n # add the build directory to includes as it stores the configuration file\n conf.env.append_unique(\"INCLUDES\", [conf.path.get_bld().abspath()])\n\n # validate C standard setting\n conf.env.C_STD = cc_config[\"std\"][\"opt\"] + cc_config[\"std\"][\"val\"]\n if conf.options.c_std: # setting might be overwritten on commandline\n conf.env.C_STD = conf.options.c_std\n conf.env.append_unique(\"CFLAGS\", [conf.env.C_STD])\n if \"89\" in conf.env.C_STD:\n if PLATFORM == \"win32\" and conf.env.CC_NAME.lower() == \"msvc\":\n Logs.warn(\"This will NOT effect msvc-builds on win32.\")\n else:\n Logs.warn(\n \"C89 does not guarantee 64-bit integers for Lua.Adding define: LUA_USE_C89\"\n )\n Logs.warn(\"Adding define: LUA_USE_C89\")\n conf.define(\"LUA_USE_C89\", 1) # TODO check for waf update\n\n min_c = \"#include<stdio.h>\\nint main() {\\n return 0;\\n}\\n\"\n\n lib_tests = []\n for lib in cc_config.get(\"libs\", []):\n lib_tests.append(\n {\n \"lib\": lib,\n \"uselib_store\": lib.upper(),\n \"msg\": f\"Checking for library '{lib}'\",\n }\n )\n\n conf.multicheck(\n {\"fragment\": min_c, \"execute\": True, \"msg\": \"Minimal C program\"},\n {\n \"fragment\": min_c,\n \"execute\": True,\n \"cflags\": conf.env.C_STD,\n \"msg\": f\"Checking c-standard '{conf.env.C_STD}'\",\n },\n *lib_tests,\n {\n \"fragment\": min_c,\n \"execute\": True,\n \"cflags\": conf.env.C_STD,\n \"use\": [i.upper() for i in cc_config.get(\"libs\", [])],\n \"msg\": \"Checking for all libraries\",\n },\n msg=\"Validating compiler setup\",\n mandatory=True,\n run_all_tests=True,\n )\n if cc_config.get(\"libs\", []):\n conf.env.USE_LIBS = [i.upper() for i in cc_config[\"libs\"]]", "def apache_projects():\n display = Display(visible=0, size=(800, 800)) \n display.start()\n # path to where I have chrome driver installed\n path_to_chromedriver = '/usr/local/bin/chromedriver'\n # initialize the driver\n driver = webdriver.Chrome(executable_path=path_to_chromedriver)\n # go to the apache projects page\n driver.get('https://projects.apache.org/projects.html')\n # wait for the list of projects to load\n time.sleep(2)\n\n # get the HTML element with id list\n elem = driver.find_element_by_id('list')\n project_list = elem.text.split(\"\\n\")\n # initialize an instance of Projects\n projects = Projects()\n\n for i in range(1, len(project_list) + 1):\n # Get the url of each project\n project_xpath = '//*[@id=\"list\"]/ul/li[%d]/a' %i\n # Get the HTML element that for the current project\n project_link = driver.find_element_by_xpath(project_xpath)\n project_name = project_link.text\n\n # Open the project page\n driver.get(project_link.get_attribute(\"href\"))\n # Wait for project page to load\n time.sleep(0.5)\n\n inception = get_inception(driver)\n description = get_description(driver, project_name)\n\n # get the name without \"Apache\", make it lowercase, and add dashes\n stripped_name = \"-\".join(project_name.lower().split(\" \")[1:]).encode('utf-8')\n github_mirror = \"http://github.com/apache/\" + stripped_name\n\n # see if there's anything at the github url that was generated\n resp = httplib2.Http().request(github_mirror, 'HEAD')\n # this means the github repo with the parsed url doesn't exist\n if int(resp[0]['status']) >= 400:\n github_mirror = \"N/A\"\n\n # Add extra attributes to the JSON\n description[\"github\"] = github_mirror\n description[\"company\"] = \"Apache Software Foundation\"\n description[\"name\"] = project_name\n description[\"day\"] = inception[\"day\"]\n description[\"month\"] = inception[\"month\"]\n 
description[\"year\"] = inception[\"year\"]\n\n projects.add(project_name, description)\n\n # Reset the driver\n driver.get('https://projects.apache.org/projects.html')\n time.sleep(0.8)\n\n return projects", "def changeOwn():\n os.system('sudo chown -R test:users /etc/resolv.conf')\n os.system('sudo chown -R test:named /etc/named.conf')", "def maintenance_up():\n sudo('cp -T %(repo_path)s/apache/%(settings)s/apache_maintenance %(apache_config_path)s' % env)\n reboot()", "def add_webserver_virtual_host(self):\n\n context = {\n \"app\": self.app_name,\n \"gunicorn_port\": config.get(\"gunicorn_port\", 8000),\n \"server_port\": config.get(\"server_port\", 80)\n }\n\n server_name = config.get(\"server_name\")\n if server_name:\n context[\"server_name\"] = \"server_name {0};\".format(server_name)\n\n upload_template(\"templates/nginx_vhost.conf\", \"/etc/nginx/sites-available/{0}.conf\".format(self.app_name), context=context, overwrite=True)\n\n with settings(warn_only=True):\n run(\"rm /etc/nginx/sites-enabled/default\")\n run(\"rm /etc/nginx/sites-enabled/{0}.conf\".format(self.app_name))\n\n run(\"ln -s /etc/nginx/sites-available/{app}.conf /etc/nginx/sites-enabled/{app}.conf\".format(app=self.app_name))\n run(\"service nginx restart\")", "def test_rename_python_api(self):\n\n rename.rename([NEW_APP_NAME, NEW_DOMAIN])\n self.assertTrue(os.path.exists(RENAMED_PROJECT_DIR))", "def update_urls_file(self, app_name):\n\n logger.info(\"\\n--------------------------------------------------------\\n\\t\\tRefreshing application list in urls.py\")\n copyfile(settings.SITE_ROOT + \"/\" + settings.APPLICATION_NAME + \"/urls.py\", settings.SITE_ROOT + \"/\" + settings.APPLICATION_NAME + \"/urls.py.backup\")\n t = loader.get_template('applicationManager/applicationFileTemplates/project_urls_py.txt')\n\n apps = Application.objects.all()\n\n c = {'applist': apps}\n rendered = t.render(c)\n open(settings.SITE_ROOT + \"/\" + settings.APPLICATION_NAME + \"/urls.py\", \"w+\").write(rendered)", "def change_config(self, repo):\n with repo.config_writer() as config:\n url = ('https://' + str(self.user.username) + ':' +\n str(self.get_user_token()) + '@github.com/' +\n str(self.user.username) + '/' + self.repo + '.git')\n config.set_value('remote \"origin\"', 'url', url)\n config.set_value('user', 'email', '[email protected]')\n config.set_value('user', 'name', 'Ranvir Singh')\n return config", "def add_local_settings():\n put('/Users/peter/Dropbox/Projects/ChromeFiddle/Local\\ Settings/prod/local_settings.py', \n '/home/django/web/chromefiddle/chromefiddle/settings')", "def test_config_remove(self):\n test_name = sys._getframe().f_code.co_name\n self.env.config.set('project', 'name', 'Test project')\n rv, output = self._execute('config remove project name')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)\n self.assertEqual('My Project', self.env.config.get('project', 'name'))", "def newproject():\n log('Criando novo projeto', yellow)\n log('Cria a conta no bitbucket com o nome do projeto vázio que o script se encarregará do resto', red)\n\n conta = raw_input('Digite o nome do projeto: ')\n\n local('echo \"clonando projeto %s\"' % bitbucket_repository)\n local('git clone {0} {1}{2}'.format(bitbucket_repository, folder_project_local, conta))\n local('cd {0}{1}'.format(folder_project_local, conta))\n local('mkvirtualenv {0}'.format(conta))\n local('setvirtualenvproject')\n local('pip install -r requirements.txt')\n local('rm -rf {0}{1}/.git'.format(folder_project_local, 
conta))\n local('rm -rf README.md')\n local('git init')\n local('git remote add origin [email protected]:{0}/{1}.git'.format(bitbucket_user, conta))", "def configuration_view(project):\n project_query = Project.select().where(Project.slug == project).first()\n if project_query is None:\n flash(\"invalid project\")\n return redirect(url_for(\"projects\"))\n session[\"project\"] = project_query\n\n g.selected_tab = \"configuration\"\n\n settings = None\n if request.method == \"GET\":\n settings = Anemone.abcfile.parse(path(project_query.path, \"build.abc\"))\n elif request.method == \"POST\":\n configuration_post(project_query, request)\n\n return render_template(\"configure.html\", ssh=open(app.config[\"SSH_PUBLIC\"]).readline(),\n build=settings, unity=app.config[\"UNITY_PATH\"])", "def main(no_dev: bool):\n is_dev = not no_dev\n rewrite_pyproject(is_dev)\n if is_dev:\n make_dev_pyproject()", "def update_url(self, project: str, new_url: str) -> dict:\n assert self.exists(project), f'Project {project} inesistente'\n assert not self.exists(new_url), f'Project \"{new_url}\" già esistente'\n\n return self.collection.find_one_and_update(\n {\n 'url': project\n },\n {\n '$set': {\n 'url': new_url,\n }\n }\n )", "def copy_project(self, new_name, switch=True):\n if new_name in self:\n raise ValueError(\"Project {} already exists\".format(new_name))\n fp = self._base_data_dir / safe_filename(new_name, full=self.dataset.full_hash)\n if fp.exists():\n raise ValueError(\"Project directory already exists\")\n project_data = ProjectDataset.get(ProjectDataset.name == self.current).data\n ProjectDataset.create(\n data=project_data, name=new_name, full_hash=self.dataset.full_hash\n )\n shutil.copytree(self.dir, fp)\n create_dir(self._base_logs_dir / safe_filename(new_name))\n if switch:\n self.set_current(new_name)", "def restartHTTPd(htconf):\n parentpid = pidHTTPd(htconf)\n if parentpid <= 1:\n return\n# hopefulle killing the parent proc. will do the trick\n print >> FileKeyUtils.WMSlog, 'restartHTTPd> kill parentpid:', parentpid\n os.system('kill -TERM '+repr(parentpid))\n apache = '/devstore/apache2/bin/httpd -f /devstore/apache2/conf/' + htconf\n print >> FileKeyUtils.WMSlog, 'restartHTTPd> via:', apache\n time.sleep(0.5) # give it time to complete proc. 
termination\n os.system('/devstore/apache2/bin/httpd -f /devstore/apache2/conf/' + htconf)", "def test_replace_namespaced_build_config(self):\n pass", "def project(self, value):\n\n if self._project != value:\n self._project = value\n self._update_page()", "def deploy(env='development', update_settings='n', upgrade_apps='n'):\n update_site(env, update_settings, upgrade_apps)\n restart_site(env)", "def http(c, path=local.http_path, port=local.http_port):\r\n c = conn(c)\r\n print(\"make http repo on {}, path [{}]\".format(c.host, path))\r\n\r\n \"\"\" 准备\r\n \"\"\"\r\n system.install(c, 'httpd createrepo')\r\n c.run('mkdir -p {path}'.format(path=path))\r\n\r\n \"\"\" 配置\r\n \"\"\"\r\n c.run('''\r\n cd {home}; mkdir -p save\r\n cp -f conf/httpd.conf save\r\n mv conf.d/welcome.conf save\r\n rm conf.d/local.conf -rf'''.format(home=local.http_home))\r\n\r\n c.run('''cat << EOF > {host}\r\n<VirtualHost *:{port}>\r\n DocumentRoot \"{path}\"\r\n <Directory \"{path}\">\r\n Options Indexes FollowSymLinks\r\n AllowOverride None\r\n Require all granted\r\n </Directory>\r\n</VirtualHost>\r\nEOF'''.format(host=local.http_host, path=path, port=port))\r\n\r\n if port != local.http_port:\r\n sed.append(hosts.conn(2), 'Listen {port}'.format(port=port), 'Listen 80', local.http_conf)\r\n print(\"set http port [{}]\".format(port))\r\n\r\n \"\"\" 配置:\r\n root path不要配置在 /tmp下,无法识别\r\n httpd -t \r\n \"\"\"\r\n\r\n if globing.invoke:\r\n c.run('''cat << EOF > /start.sh\r\n#!/bin/bash\r\necho \"start httpd ... [`date`]\"\r\n\r\n#mkdir -p /run/httpd\r\nfor count in {1..5} \r\ndo \r\n echo \"start $count\"\r\n httpd -DFOREGROUND\r\n sleep 1\r\ndone\r\nEOF''')\r\n else:\r\n c.run('systemctl restart httpd')", "def redefine_app_config_home(self, config_home):\n dst = _app_config_file()\n new_config = (\n pyhocon.ConfigFactory.parse_string(\n \"aiscalator.app_config_home_directory = \" + config_home\n )\n ).with_fallback(_app_config_file(), resolve=False)\n with open(dst, \"w\") as output:\n output.write(\n pyhocon.converter.HOCONConverter.to_hocon(new_config)\n )\n self._app_conf = new_config\n return new_config", "def update_project(builder):\r\n\r\n projectfile = join(THISDIR, \"ringo-wp8.csproj\")\r\n\r\n dom = parse(projectfile)\r\n Languages = getattr(builder.CustomCfg, \"Languages\", None )\r\n\r\n if not Languages is None:\r\n Languages = [lan.replace('en-US', 'en') for lan in Languages]\r\n print \"Modified languages\", \",\".join( Languages )\r\n \r\n Languages = [] if Languages is None else Languages\r\n update_project_with_values(dom,\r\n Languages = Languages)\r\n\r\n with open(projectfile, 'wb') as f:\r\n data = dom.toprettyxml(indent = \" \")\r\n # toprettyxml adds extra new lines\r\n lines = [ x for x in data.split(\"\\n\") if len(x.strip()) > 0]\r\n data = \"\\n\".join(lines)\r\n f.write(data)\r\n\r\n if len(Languages) > 0 :\r\n default_language = Languages[0]\r\n if default_language != \"en\" and default_language.lower() != \"en-us\" :\r\n temppath = join(THISDIR, \"src\", \"MobileSecurity\",\"resources\");\r\n print \"Renaming: \", temppath\r\n try:\r\n os.remove(join(temppath,\"Localized.en.resx\"))\r\n except:\r\n pass\r\n os.rename(join(temppath,\"Localized.resx\"), join(temppath,\"Localized.en.resx\"))\r\n try:\r\n os.remove(join(temppath, \"Localized.resx\"))\r\n except:\r\n pass\r\n os.rename(join(temppath,\"Localized.%s.resx\" %(default_language)), join(temppath, \"Localized.resx\"))", "def update_config(self, config):\n # add follower public folder to the CKAN's list of public folders\n here = 
os.path.dirname(__file__)\n public_dir = os.path.join(here, 'public')\n if config.get('extra_public_paths'):\n config['extra_public_paths'] += ',' + public_dir\n else:\n config['extra_public_paths'] = public_dir\n # add follower template folder to the CKAN's list of template folders\n template_dir = os.path.join(here, 'templates')\n if config.get('extra_template_paths'):\n config['extra_template_paths'] += ',' + template_dir\n else:\n config['extra_template_paths'] = template_dir", "def deploy_config():\n run('cp {}/tools/WebGrab++.config.xml {}'.format(env.repo_dir, env.wg_dir))", "def _editSysconfig():\n dbUrl = \"jdbc:postgresql://\" + getDbHostName() + \":\" + getDbPort() + \"/\" + basedefs.DB_NAME\n if \"DB_SECURE_CONNECTION\" in controller.CONF.keys() and controller.CONF[\"DB_SECURE_CONNECTION\"] == \"yes\":\n dbUrl = dbUrl + \"?ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory\"\n\n proxyEnabled = utils.compareStrIgnoreCase(controller.CONF[\"OVERRIDE_HTTPD_CONFIG\"], \"yes\")\n utils.editEngineSysconfig(proxyEnabled=proxyEnabled,\n dbUrl=dbUrl,\n dbUser=utils.getDbUser(),\n fqdn=controller.CONF[\"HOST_FQDN\"],\n http=controller.CONF[\"HTTP_PORT\"],\n https=controller.CONF[\"HTTPS_PORT\"],\n javaHome=controller.CONF[\"JAVA_HOME\"])", "def newdev():\n log('Configura uma computador Ubuntu para trabalhar python/django', yellow)\n update_local()\n upgrade_local()\n\n # pacotes\n build_local()\n python_local()\n mysql_local()\n git_local()\n\n # atualizando\n update_local()\n upgrade_local()", "def projectDir(self, path):\n logger.debug(\"Func: projectDir/setter\")\n self._pathsDict[\"projectDir\"] = path\n # self.init_paths()\n # self.init_database()", "def test_update_project(self):\n pass", "def test_update_project(self):\n pass", "def work_in_example_project(request):\n return chdir_in_and_out(request, LoslassaProject.EXAMPLE_PROJECT)", "def test_replaceProjectVersion(self):\n replaceProjectVersion(\"test_project\",\n Version(\"twisted.test_project\", 0, 82, 7))\n ns = {'__name___': 'twisted.test_project'}\n execfile(\"test_project\", ns)\n self.assertEquals(ns[\"version\"].base(), \"0.82.7\")", "def test_patch_namespaced_build_config(self):\n pass", "def change_project(self, project, project_format='id'):\n name = 'tenant' if self.api_version == 2 else 'project'\n self.creds['%s_%s' % (name, project_format)] = project\n opposite_format = 'name' if project_format == 'id' else 'id'\n del self.creds['%s_%s' % (name, opposite_format)]" ]
[ "0.6472111", "0.6156051", "0.6123365", "0.5980424", "0.5979637", "0.5878512", "0.57706547", "0.56331587", "0.55369204", "0.5507125", "0.54162174", "0.5402613", "0.53572744", "0.52999634", "0.5277135", "0.52048105", "0.5179632", "0.5174573", "0.5156144", "0.514356", "0.5130455", "0.5111259", "0.50954354", "0.50872135", "0.5075139", "0.50248396", "0.5013426", "0.49799138", "0.49690223", "0.49642625", "0.4960139", "0.49427053", "0.49411944", "0.4915059", "0.49045846", "0.48729226", "0.4864048", "0.484672", "0.48443484", "0.48430958", "0.48317957", "0.4827", "0.48255977", "0.48239705", "0.4809373", "0.48006123", "0.47721443", "0.4769838", "0.4767963", "0.47587648", "0.47378287", "0.47329032", "0.47269407", "0.47197518", "0.47168753", "0.47157717", "0.47156143", "0.47126675", "0.47126675", "0.47126675", "0.47081006", "0.4706081", "0.46984977", "0.46916896", "0.46915364", "0.46905324", "0.4690405", "0.46861663", "0.46799108", "0.46795845", "0.46783367", "0.4677854", "0.46733722", "0.4654399", "0.46515185", "0.4643623", "0.46427438", "0.46331704", "0.46322373", "0.46296284", "0.4628992", "0.46285343", "0.46279916", "0.46210057", "0.46192822", "0.46103242", "0.4603683", "0.4599898", "0.45978495", "0.45960954", "0.45909634", "0.4587774", "0.45813814", "0.45797548", "0.45775962", "0.45775962", "0.4573675", "0.45727825", "0.45695287", "0.4568345" ]
0.72978866
0
Clones the git repo into the new webapp, deleting the default myproject project and updating the config file to point to the new project. Also adds a site_settings.py file to the project/project folder.
def clone_into_project(git_repo_name):
    repo_dir = git_dir + "/%s.git" % git_repo_name
    with cd(remote_dir):
        run('rm -rf myproject')
        run("git clone %s %s" % (repo_dir, project_name))
        run("echo 'MY_ENV=\"prod\"' > %s/%s/site_settings.py" % (project_name, project_name))
        update_conf_file()
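A minimal usage sketch for the document above, assuming a Fabric 1.x fabfile in which git_dir, remote_dir, project_name, and update_conf_file are module-level names. None of these definitions are part of this record, so the values and the helper body below are illustrative assumptions only:

from fabric.api import cd, run

# Assumed module-level context (not shown in the record above):
git_dir = "/home/git"          # assumed directory holding bare repos on the server
remote_dir = "/srv/webapps"    # assumed parent directory of the deployed webapp
project_name = "mysite"        # assumed name of the new project

def update_conf_file():
    # Placeholder for the helper referenced by clone_into_project; the real
    # implementation (pointing the webserver config at the new project)
    # is not included in this record.
    run("sed -i 's/myproject/%s/g' /etc/apache2/sites-available/%s.conf"
        % (project_name, project_name))

# Invoked as a Fabric task, e.g.:
#   fab clone_into_project:my_repo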
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_project():\n _require_environment()\n\n # Grants write rights on log dir for the admin group\n log_dir = '%s/log' % _interpolate(VIRTUALENV_DIR)\n if files.exists(log_dir):\n sudo('chmod -R g+w %s' % log_dir)\n\n # Updates from git, issues Django syncdb, South migrate, Collecstatic and resets Apache\n branch = env.project.get('git_branch', 'master')\n with prefix(_django_prefix()):\n with cd(_django_project_dir()):\n with settings(hide('warnings'), warn_only=True):\n run('git fetch origin %s:%s' % (branch, branch))\n run('git checkout %s' % branch)\n with settings(hide('warnings'), warn_only=True):\n run('git pull origin %s' % branch)\n run('django-admin.py syncdb --noinput')\n run('django-admin.py migrate')\n run('touch config/wsgi*')\n run('django-admin.py collectstatic --noinput')", "def newproject():\n log('Criando novo projeto', yellow)\n log('Cria a conta no bitbucket com o nome do projeto vázio que o script se encarregará do resto', red)\n\n conta = raw_input('Digite o nome do projeto: ')\n\n local('echo \"clonando projeto %s\"' % bitbucket_repository)\n local('git clone {0} {1}{2}'.format(bitbucket_repository, folder_project_local, conta))\n local('cd {0}{1}'.format(folder_project_local, conta))\n local('mkvirtualenv {0}'.format(conta))\n local('setvirtualenvproject')\n local('pip install -r requirements.txt')\n local('rm -rf {0}{1}/.git'.format(folder_project_local, conta))\n local('rm -rf README.md')\n local('git init')\n local('git remote add origin [email protected]:{0}/{1}.git'.format(bitbucket_user, conta))", "def touch_project():\n remote('touch config/wsgi*')", "def createproject(project_name):\n app_clone_script = 'git clone https://github.com/jaarce/falcon-bp.git %s' % project_name\n subprocess.call(app_clone_script.split(' '))", "def djangular_boilerplate():\n git = Repo()\n path = git.absolute_path\n package_name = git.package_name\n clone = git.command\n if not os.path.exists(path):\n os.system(clone)\n rename = prompt(prompt_rename)\n if rename.get(\"rename\", True):\n os.rename(package_name, input(\"Rename directory: \"))\n else:\n pass\n elif os.path.exists(path):\n ow = prompt(prompt_overwrite)\n if ow.get(\"overwrite\", True):\n shutil.rmtree(package_name)\n os.system(clone)\n rename = prompt(prompt_rename)\n if rename.get(\"rename\", True):\n os.rename(package_name, input(\"Rename directory: \"))\n else:\n exit(\"You have chosen not to overwrite. 
Session ended.\")", "def clone():\n require('PROJECT_NAME')\n require('PROJECT_REPO')\n require('MERCURIAL_BIN')\n\n # Create the \"apps\" directory if it does not exist.\n run('mkdir -p {}'.format(utils.home('apps')))\n\n if files.exists(utils.home('apps', env.PROJECT_NAME)):\n delete()\n\n with cd(utils.home('apps')):\n run('{0} clone {1} {2}'.format(env.MERCURIAL_BIN,\n env.PROJECT_REPO,\n env.PROJECT_NAME))", "def deploy_project(self, svn_repo, co_dir, path_to_static, database_name, apache_conf_file_path, project_port, svn_username='', svn_password='', db_username='', db_password='', sql_paths_list=[], changes_dict={}):\n self.checkout(svn_repo, co_dir, svn_username, svn_password)\n self.change_static_to_pro(path_to_static)\n self.create_db(database_name, sql_paths_list, db_username, db_password)\n self.change_settings(co_dir, changes_dict)\n self.create_vhost(apache_conf_file_path, project_dir=co_dir, project_port=project_port)\n self.server_restart()", "def flush_repo():\n server = get_server()\n run(\"rm -rf %(project_name)s\" % env)\n git.clone()\n server.setup()", "def update_site():\n site_path = os.path.join(PROJECTS_ROOT, CURRENT_SITE)\n docs_path = os.path.join(site_path, 'doc_src')\n with cd(site_path):\n run('git pull --all')\n run('workon djangopatterns && pip install -r %s/setup/requirements.txt' % site_path)\n run('workon djangopatterns && %s/manage.py syncdb' % site_path)\n # run('workon djangopatterns && %s/manage.py migrate' % site_path)\n run('workon djangopatterns && %s/manage.py collectstatic --noinput' % site_path)\n run('workon djangopatterns && %s/manage.py compress' % site_path)\n with cd(docs_path):\n run('git pull --all')\n # run('workon djangopatterns && cd doc_src && make clean')\n # run('workon djangopatterns && cd doc_src && make json')\n reload_site()", "def _clone_gitrepo():\n # Puts git repo in ~/.ssh/config to avoid interaction due to missing known_hosts\n git_server = urllib.splituser(urllib.splittype(env.project['git_repo'])[0])[1]\n if not files.exists('~/.ssh/config') or not files.contains('~/.ssh/config', git_server):\n files.append('~/.ssh/config', ['host %s' % git_server, ' StrictHostKeyChecking no'])\n\n branch = env.project.get('git_branch', 'master')\n if files.exists(_interpolate(DJANGO_PROJECT_DIR)):\n print _interpolate('project %(project)s already exists, updating')\n remote('git pull origin %s' % branch)\n else:\n with cd(_interpolate(VIRTUALENV_DIR)):\n run(_interpolate('git clone %(git_repo)s %(project)s'))\n if branch != 'master':\n remote('git fetch origin %s:%s' % (branch, branch))\n remote('git checkout %s' % branch)", "def deploy():\n setup()\n builddir = get_build_dir()\n if sys.platform == 'win32':\n # Support cygwin rsync on windows:\n build_path = cygpath(slashed(builddir))\n else:\n build_path = slashed(builddir)\n rsync_project(env.admin_webroot, build_path, exclude=\".*\", delete=True)\n sudo(\"chmod -R 755 %(admin_webroot)s\" % env)", "def dev_site(live_path, dev_parent, dev_name, dev_db_name='',\n base_url='', rewrite_base=''):\n with mute():\n remote = git.get_remote_url(live_path)\n dev_path = '%s/%s' % (dev_parent, dev_name)\n if exists(dev_path):\n warning = \"\"\"\nA folder already exists at your destination path.\n\nDo you wish to overwrite it?\n\"\"\"\n confirm_overwrite(warning)\n\n with mute():\n run('rm -rf %s' % dev_path)\n with cd(dev_parent):\n run('git clone %s %s' % (remote, dev_name))\n\n with cd(dev_path):\n run('git fetch')\n run('git branch')\n\n # Determinine a branching strategy\n strategy_prompt = 
\"\"\"\nHow would you like to create your dev site:\n1) Use an existing Git branch\n2) Create a new Git branch\n:\n\"\"\"\n strategy = prompt(strategy_prompt,\n validate=validate_branching_strategy)\n\n # Checkout an existing branch\n if strategy == '1':\n branch_prompt = \"\"\"\nWhich existing branch would you like to use for this dev site?\n\"\"\"\n # TODO - add validation\n dev_branch = prompt(branch_prompt)\n run('git checkout %s' % dev_branch)\n run('git pull origin %s' % dev_branch)\n\n # Create new branch\n if strategy == '2':\n start_branch_prompt = \"\"\"\nWhich branch should we use to start from?\n\"\"\"\n start_branch = prompt(start_branch_prompt)\n run('git checkout %s' % start_branch)\n dev_branch_prompt = \"\"\"\nWhat would like to name the new dev branch?\n\"\"\"\n dev_branch = prompt(dev_branch_prompt)\n run('git checkout -b %s' % dev_branch)\n # Look for an git origin in the live site\n\n # cd to the dev parent dir and clone the repo from origin\n\n # switch to the develop branch\n\n # git fetch\n\n # git pull origin develop\n\n # Duplicate the live mysql db as a dev db\n # Look into cross platform ways to just do the db duplication without\n # needing to write the db dump file and then do the insert\n\n # Configure the settings.php and .htaccess files for the dev site\n\n # Copy the files folder from the live site to the dev site\n # Eventually there should be a option here for doing read only sym-links\n # Or maybe some S3 thingy\n\n # drush cc all on dev\n\n # done", "def project_clone(request, proj_id=None):\n\n if not proj_id or not request.user.is_authenticated():\n raise Http404\n\n project = get_object_or_404(Project, id=proj_id)\n\n if project.user != request.user and project.is_private:\n raise Http404\n\n project.pk = None\n project.user = request.user\n project.save()\n\n for scenario in Scenario.objects \\\n .filter(project_id=proj_id) \\\n .order_by('created_at'):\n scenario.pk = None\n scenario.project = project\n scenario.save()\n\n return redirect('/project/{0}'.format(project.id))", "def mkweb(project_name, mode):\n\n MAIN_FOLDER = data.get_base_path(data.WEB)\n\n if mode != 'MAIN':\n MAIN_FOLDER += f'{mode}/'\n \n webproject = folders.WebProject(project_name, MAIN_FOLDER)\n\n webproject.create_project()\n click.echo(f'Project created succesfull in {webproject.project_path}')\n cli_commands.start_git(webproject.project_path)\n cli_commands.show_dir_path(webproject.project_path)\n # cli_commands.start_vscode(webproject.project_path)\n\n click.echo('Project Path copied to clipboard...')", "def deploy_django_project(self):\n\n if self.no_files:\n return\n\n local_dir = \"{0}\".format(self.app_dir)\n app_dir = \"{0}\".format(self.app_remote_dir)\n\n if not exists(app_dir):\n mkdir(app_dir)\n\n zip_name = make_zip(local_dir, self.app_name)\n put(zip_name, self.app_remote_dir)\n\n with cd(self.app_remote_dir):\n run(\"unzip -o {0}\".format(zip_name))\n\n os.remove(zip_name)", "def deploy():\n build()\n rsync_project(\n local_dir=os.path.abspath(env.config['destination']) + \"/\",\n remote_dir=env.remote_dir,\n delete=True,\n extra_opts='--exclude=\".DS_Store\"',\n )", "def _create_main_project_and_root(self): \n if len(ComicSite.objects.filter(short_name=settings.MAIN_PROJECT_NAME)) == 0:\n main = ComicSite.objects.create(short_name=settings.MAIN_PROJECT_NAME,\n description=\"main project, autocreated by comicframeworkTestCase._create_inital_project()\",\n skin=\"fakeskin.css\"\n )\n \n main.save()\n \n try:\n self.root = User.objects.get(username='root')\n 
except ObjectDoesNotExist:\n # A user who has created a project\n root = User.objects.create_user('root',\n '[email protected]',\n 'testpassword') \n root.is_staff = True\n root.is_superuser = True\n root.save()\n \n self.root = root\n\n call_command('check_permissions')", "def upload():\n run('mkdir -p /srv/images/'+env.project_name+'/')\n rsync_project(\n env.project_dir, './',\n exclude=(\n '.git', '.gitignore', '__pycache__', '*.pyc', '.DS_Store', 'environment.yml',\n 'fabfile.py', 'Makefile', '.idea', 'bower_components', 'node_modules',\n '.env.example', 'README.md', 'var'\n ), delete=True)", "def git_project(soup, github_user, github_pass, github_repo, github_name):\n giturl = 'https://{user}:{password}@github.com/{user}/{repo}.git'.format(\n user=github_user, password=github_pass, repo=github_repo\n )\n oldcwd = os.getcwd()\n tmpdir = tempfile.mkdtemp()\n gitdir = os.path.join(tmpdir, github_repo)\n cmd = 'git clone {} {}'.format(shlex.quote(giturl), shlex.quote(gitdir))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(gitdir)\n rhinoscrape(soup, github_user, github_name)\n cmd = 'git add .'\n subprocess.run(shlex.split(cmd), check=False)\n msg = 'Project committed by Rhino Repo'\n cmd = 'git commit -m {}'.format(shlex.quote(msg))\n subprocess.run(shlex.split(cmd), check=False)\n cmd = 'git push {}'.format(shlex.quote(giturl))\n subprocess.run(shlex.split(cmd), check=False)\n os.chdir(oldcwd)\n shutil.rmtree(tmpdir, ignore_errors=True)", "def project_refresh(project_name):\n if not db_find_project(project_name):\n abort(404)\n analyser.add_repos(current_user.username, [project_name])\n return redirect(url_for('main.admin_manage'))", "def create_project_form(request):\n \n # First we check to see the site has been set up, otherwise we throw the user to the config screen\n if not bool(os.path.isdir(Project.project_options.repository_directory)):\n request.user.message_set.create(message=\"The site has not been set up yet. 
Log in as your admin user and create your settings!\")\n return HttpResponseRedirect(reverse('site-config'))\n \n if request.is_ajax():\n template ='project/project_create_ajax.html'\n else:\n template = 'project/project_create.html'\n \n # Lets check if this form is being shown or processed\n if request.method == \"POST\":\n # We're processing the form, so lets create the instance\n form = NewProjectForm(request.POST, auto_id=False)\n # The form is correct, lets proceeed.\n if form.is_valid():\n # Lets check the user has conformed to a sites T&C's\n if form.cleaned_data['t_and_c'] == True:\n # Create the project instance\n project = Project(\n project_id = string.lower(form.cleaned_data['project_id']),\n project_name = form.cleaned_data['project_name'],\n short_description = form.cleaned_data['short_description'],\n full_description = form.cleaned_data['full_description'],\n project_manager = request.user,\n hgweb_style = form.cleaned_data.get('hgweb_style', ''),\n project_icon = form.cleaned_data['project_icon'],\n )\n # Ok, we're all good, so lets save.\n project.save()\n # We'll tell the user that there site has been saved\n request.user.message_set.create(message=_(\"The project \" + form.cleaned_data['project_name'] + \" has been created\"))\n if request.is_ajax():\n return HttpResponse(\n \"{'success': 'true', 'url': '\" + reverse('project-detail', kwargs={'slug':form.cleaned_data['project_id']}) + \"', 'project': \" + json_encode(project) + \"}\"\n , mimetype=\"application/json\")\n else:\n return HttpResponseRedirect(reverse('project-detail', kwargs={'slug': form.cleaned_data['project_id']}))\n else:\n return render_to_response(template,\n {\n 'form':form.as_table(),\n }, context_instance=RequestContext(request)\n )\n #return HttpResponseRedirect(reverse('project-detail', kwargs={'slug':form.cleaned_data['name_short']}))\n else:\n form = NewProjectForm()\n is_auth = request.user.is_authenticated()\n \n return render_to_response(template,\n {\n 'form':form.as_table(),\n 'is_auth': is_auth\n }, context_instance=RequestContext(request)\n )", "def edit_files(project_name, app_name):\n SETTINGS = f'{project_name}/backend/backend/settings.py'\n PACKAGE_JSON = f'{project_name}/frontend/package.json'\n\n\n c1 = f\"\\n \\t'corsheaders', \\n\\t'rest_framework', \\n\\t'{app_name}',\\n\"\n add_to_line(SETTINGS, 32, c1 )\n\n c2 = f\"\\n \\t'corsheaders.middleware.CorsMidleware',\\n\"\n add_to_line(SETTINGS, 44, c2 )\n \n with open(SETTINGS, 'a+') as f:\n f.write(\"\\nCORS_ORIGIN_WHITELIST = ['localhost:3000/']\")\n\n c3 = '\\n\\t\"proxy\": \"http://localhost:8000\",\\n'\n add_to_line(PACKAGE_JSON, 3, c3)", "def update_project():\n with cd(env.code_dir):\n with _virtualenv():\n run('git pull origin master')\n install_requirements()\n perform_migration()\n collect_static()", "def upload():\n env.user = 'webcontent'\n rsync_project(DOCDIR, 'doc/_build/html/', delete=True)", "def deploy():\n remote_dir = os.path.abspath(os.path.join(REMOTE_BASE_DIR, REPO_NAME))\n \n with settings(warn_only=True):\n if run(\"test -d %s\" % (remote_dir)).failed:\n puts(red(\"[Repo %s does not exist on remote at: %s]\" % (REPO_NAME, remote_dir)))\n with cd(REMOTE_BASE_DIR):\n run(\"git clone %s %s\" % (REPO_URL, REPO_NAME))\n\n puts(yellow(\"[Write logs]\"))\n run(\"echo '-----------------------------' > %s\" % REMOTE_ERR_FILE)\n run(\"echo `date` >> %s\" % REMOTE_ERR_FILE)\n run(\"echo '-----------------------------' >> %s\" % REMOTE_ERR_FILE)\n run(\"echo '-----------------------------' > %s\" % REMOTE_LOG_FILE)\n 
run(\"echo `date` >> %s\" % REMOTE_LOG_FILE)\n run(\"echo '-----------------------------' >> %s\" % REMOTE_LOG_FILE)\n\n puts(yellow(\"[Update repo: %s]\" % REPO_NAME))\n with cd(remote_dir):\n run(\"git pull origin master >> %s 2>> %s\" %\n (REMOTE_LOG_FILE, REMOTE_ERR_FILE))\n\n # reminder new static files\n puts(yellow('Do not forget to run collect staticfiles on DJANGO server.'))", "def deploy(app_to_migrate=\"\"):\n mysqldump() # backup database before making changes\n with cd(code_dir):\n run(\"git pull\")\n run(python_add_str + \"python manage.py migrate %s\" % app_to_migrate)\n run(python_add_str + \"python manage.py createinitialrevisions\") # only if using reversion\n run(python_add_str + \"python manage.py collectstatic --noinput\")\n run(\"../apache2/bin/restart\")", "def startproject(self):\n\n path = os.path.join(self.path, self.project_name)\n if os.path.exists(path):\n raise exceptions.ProjectDirectoryAlreadyExistsError(self.project_name)\n else:\n os.makedirs(path)\n\n context = {\n 'project_name': self.project_name,\n 'default_region': self.region,\n 'random': hashlib.sha1(six.text_type(random.random()).encode('utf-8')).hexdigest()[:8]\n }\n\n self._clone_defaults(\n os.path.join(self.root, 'defaults', 'project'),\n path,\n context\n )", "def push(ref='origin/master'):\n from fabric.api import local, run, cd\n from fabric.contrib.project import rsync_project\n local('pelican -s %s -d' % env.config_file)\n rsync_project(\n remote_dir=env.host_site_path,\n local_dir='output/',\n delete=True\n )\n if env.host_type != 'production':\n run(\"chown -R %(user)s:%(host_webserver_user)s %(host_site_path)s \"\n \"&& chmod -R 02750 %(host_site_path)s\" % env)", "def prepare_deploy():\n from fabdeploy.django import test as django_test\n django_test()\n git.add_commit_pull()\n git.push()", "def deploy():\n _git_pull()\n _migrate()\n _collect_static_files()\n _restart_webserver()", "def deploy():\n with cd(\"~/public_html/\"):\n run(\"/usr/local/cpanel/3rdparty/bin/git pull\")\n\n with cd(\"~/public_html/skin/frontend/gemz/default/tools/\"):\n run(\"grunt default\")\n #sudo(\"/scripts/enablefileprotect\")", "def create_new_python_project():\n\t# Create the different variables\n\tfolder_name = str(sys.argv[1])\n\tdir_name = my_project_folder + folder_name\n\tpy_file = dir_name + '/' + folder_name + '.py'\n\treadme_file = dir_name + '/' + 'README.md'\n\ttodo_file = dir_name + '/' + 'TODO.txt'\n\n\t# Create directory if it does not exist yet\n\tif not os.path.exists(dir_name):\n\t\tos.mkdir(dir_name)\n\t\tprint(\"Directory \" , dir_name , \" Created \")\n\n\t\t# Create Python file\n\t\tdata = ''\n\t\twith open(template_py, 'r') as file:\n\t\t\tdata += file.read()\n\n\t\twith open(py_file, 'w') as f:\n\t\t\tf.write(data)\n\t\t\tprint(\"Python file created\")\n\n\t\t# Create README file\n\t\tdata = ''\n\t\twith open(template_readme, 'r') as file:\n\t\t\tdata += file.read()\n\n\t\twith open(readme_file, 'w') as f:\n\t\t\tf.write(data)\n\t\t\tprint(\"Readme file created\")\n\n\t\t# Create Todo file\n\t\twith open(todo_file, 'w') as f:\n\t\t\tprint(\"TODO file created\")\n\n\t\t# Create Github repo\n\t\twith open(\".env\", \"r\") as f:\n\t\t\tdata = f.read()\n\n\t\tindex_1 = data.find('TOKEN=\"') + len('TOKEN=\"')\n\t\ttoken = data[index_1:-1]\n\t\tg = Github(token)\n\t\tuser = g.get_user()\n\t\trepo = user.create_repo(folder_name)\n\t\tprint(\"Succesfully created repository {}\".format(folder_name))\n\n\n\telse: \n\t\tprint(\"Directory \" , dir_name , \" already exists\")", "def deploy():\n 
base_dir = '/webapps/seb_django/www'\n submodule = 'courses/bioinf-workshop'\n\n puts(yellow(\"[Activate env]\"))\n run('source ~/bin/activate')\n \n with settings(warn_only=True):\n if run(\"test -d %s/%s\" %(base_dir, submodule)).failed:\n puts(red(\"[Submodule does not exist: %s]\"%submodule))\n with cd(base_dir):\n run(\"git submodule add [email protected]:sschmeier/bioinf-workshop.git %s\" %(submodule))\n run(\"git submodule init\")\n \n puts(yellow(\"[Update submodule: %s]\"%submodule))\n with cd(base_dir):\n run(\"git submodule update --remote --merge %s\"%submodule)", "def configure_project():\n pass", "def clone_remote_theme(save_dir: str, config: dict):\r\n os.makedirs(save_dir, exist_ok=True)\r\n\r\n repo_dir = os.path.join(save_dir, config['name'])\r\n\r\n if os.path.exists(repo_dir):\r\n force_rmtree(repo_dir)\r\n\r\n repo_url = config['url']\r\n repo_branch = config.get('branch', 'master')\r\n repo_tag = config.get('tag', '')\r\n\r\n def safe_run(command, cwd):\r\n try:\r\n run(command, cwd)\r\n except Exception:\r\n raise TemplateError('Cannot fetch theme from ' + repo_url)\r\n\r\n safe_run('git clone -b %s %s %s' % (repo_branch, repo_url, repo_dir), '.')\r\n if repo_tag != '':\r\n safe_run('git checkout %s' & repo_tag, repo_dir)", "def newdev():\n log('Configura uma computador Ubuntu para trabalhar python/django', yellow)\n update_local()\n upgrade_local()\n\n # pacotes\n build_local()\n python_local()\n mysql_local()\n git_local()\n\n # atualizando\n update_local()\n upgrade_local()", "def django_setup_wing_project():\r\n\r\n actions = []\r\n errs = []\r\n \r\n app = wingapi.gApplication\r\n manage_py, settings_py = _CDjangoPluginActivator._instance._FindKeyFiles()\r\n if manage_py is None:\r\n title = _(\"Django Files Not Found\")\r\n msg = _(\"Please add your Django project directory to your Wing IDE Project then \"\r\n \"try again. This command requires that the files manage.py and settings.py \"\r\n \"can be found in the project.\")\r\n app.ShowMessageDialog(title, msg)\r\n return\r\n \r\n # Set up Python Executable\r\n proj = app.GetProject()\r\n try:\r\n django_admin = proj.GetAttribute('django-admin')\r\n except KeyError:\r\n errs.append(_(\"Could not determine which Python Executable to use in Project Properties. \"\r\n \"Set this manually if the default Python \"\r\n \"is not the one being used with Django.\"))\r\n else:\r\n pyexec = _get_pyexec_from_django_admin(django_admin)\r\n if pyexec is None:\r\n errs.append(_(\"Could not obtain Python Executable to use in Project Properties from \"\r\n \"%s. Set this manually if the default Python \"\r\n \"is not the one being used with Django.\") % django_admin)\r\n else:\r\n proj.SetPythonExecutable(None, pyexec)\r\n actions.append(_(\"%s was set as the Python Executable in Project Properties\") % pyexec)\r\n \r\n # Set up main debug file and its run arguments\r\n proj.SetMainDebugFile(manage_py)\r\n runargs = 'runserver --noreload 8000'\r\n proj.SetRunArguments(manage_py, runargs)\r\n actions.append(_(\"%s was set as main debug file with run arguments \"\r\n \"%s\") % (manage_py, runargs))\r\n \r\n # Set up environment project-wide\r\n env = proj.GetEnvironment(None, overrides_only=True).copy()\r\n proj_dir = _CDjangoPluginActivator._instance._GetDjangoProjectDir()\r\n env['DJANGO_SITENAME'] = os.path.basename(proj_dir)\r\n env['DJANGO_SETTINGS_MODULE'] = \"${DJANGO_SITENAME}.settings\"\r\n \r\n # Add site directory and enclosing directory to python path if not already \r\n # in there. 
Note that by default Django has only the site directory on\r\n # the path and seems to use import magic for \"from sitename.models import *\"\r\n pypath = env.get('PYTHONPATH', None)\r\n if pypath is not None:\r\n pypath = pypath.split(os.pathsep)\r\n else:\r\n pypath = []\r\n if not os.path.dirname(proj_dir) in pypath:\r\n pypath.append(os.path.dirname(proj_dir))\r\n if not proj_dir in pypath:\r\n pypath.append(proj_dir)\r\n pypath = os.pathsep.join(pypath)\r\n env['PYTHONPATH'] = pypath\r\n \r\n # Set both env and pypath into project properties\r\n proj.SetEnvironment(None, 'startup', env)\r\n actions.append(_(\"DJANGO_SITENAME and DJANGO_SETTINGS_MODULE \"\r\n \"environment variables were added to the project-wide environment\"))\r\n actions.append(_(\"added directories %s and %s to the Python Path in Project Properties\") % (os.path.dirname(proj_dir), proj_dir))\r\n \r\n # Make sure that template debugging is enabled in settings file\r\n try:\r\n f = open(settings_py)\r\n txt = f.read()\r\n f.close()\r\n except:\r\n errs.append(_(\"Failed to read %s to modify TEMPLATE_DEBUG setting\") % settings_py)\r\n else:\r\n changed = False\r\n found = False\r\n lines = txt.splitlines()\r\n eol = _get_eol(txt)\r\n for i in range(0, len(lines)):\r\n if lines[i].strip().startswith('TEMPLATE_DEBUG'):\r\n words = lines[i].split()\r\n if len(words) > 2 and words[1] == '=':\r\n found = True\r\n if words[2] not in ('1', 'True'):\r\n leading = lines[i][:lines[i].find('TEMPLATE_DEBUG')]\r\n lines[i] = '#' + lines[i]\r\n lines.insert(i+1, leading + 'TEMPLATE_DEBUG = True')\r\n changed = True\r\n break\r\n if not found:\r\n lines.extend(['', 'TEMPLATE_DEBUG = True', ''])\r\n changed = True\r\n if changed:\r\n try:\r\n f = open(settings_py, 'w')\r\n txt = eol.join(lines)\r\n f.write(txt)\r\n f.close()\r\n except:\r\n errs.append(_(\"Failed to write %s with TEMPLATE_DEBUG enabled\") % settings_py)\r\n else:\r\n actions.append(_(\"set TEMPLATE_DEBUG = True in the site's settings.py file so \"\r\n \"Wing's debugger can debug templates\"))\r\n \r\n # Make sure template debugging project property is enabled\r\n import proj.attribs\r\n value = app.fSingletons.fFileAttribMgr.GetValue(proj.attribs.kTemplateDebugging)\r\n if not value:\r\n app.fSingletons.fFileAttribMgr.SetValue(proj.attribs.kTemplateDebugging, True)\r\n actions.append(_(\"enabled Template Debugging in Project Properties\"))\r\n\r\n # Set up unit testing in project properties\r\n import testing.attribs\r\n import testing.adaptors\r\n manage_loc = location.CreateFromName(manage_py)\r\n value = app.fSingletons.fFileAttribMgr.GetValue(testing.attribs.kTestFramework, manage_loc)\r\n if value != testing.adaptors.CDjangoTestAdaptor.internal_id:\r\n app.fSingletons.fFileAttribMgr.SetValue(testing.attribs.kTestFramework, manage_loc,\r\n testing.adaptors.CDjangoTestAdaptor.internal_id)\r\n cmd = 'add-testing-files(locs=\"%s\")' % manage_py\r\n app.ExecuteCommand(cmd)\r\n actions.append(_(\"configured unit testing for Django\"))\r\n\r\n # Show confirmation to user\r\n title = _(\"Django Configuration Complete\")\r\n msg = _(\"The project file has been configured for Django. 
\")\r\n if errs:\r\n msg += _get_errors_list(errs)\r\n msg += _get_actions_list(actions)\r\n app.ShowMessageDialog(title, msg, modal=False)", "def _deploy_app():\n rsync_project(env.remote_directory, env.local_directory,\n exclude=['.git/', '*.pyc', 'tests.py', 'migrations/'])\n sudo('service installer_app restart')", "def deploy():\n\n project_dir = '/home/gastosabertos/gastos_abertos_website'\n with cd(project_dir):\n local('tar -cvzf build.tar.gz build')\n run('cp -r build build-old')\n put('build.tar.gz', '.')\n run('tar -xvf build.tar.gz')", "def deploy():\n git_pull()\n if confirm(\"Install/upgrade requirements with pip?\"):\n install_requeriments()\n django_command('collectstatic')\n django_command('migrate')\n restart()", "def create_projects(self):\n if self.gl is None or self.config is None:\n print(\"No config/Gitlab found, please run connect first.\")\n exit(1)\n else:\n print(\"Starting Project creation.\")\n gl = self.gl\n config = self.config\n for project in config[\"projects\"]:\n # get the import url\n imp_url = config[\"projects\"][project][\"import_url\"]\n\n # Set rights/members/protected master\n if config[\"projects\"][project][\"owner_conf\"][\"owner\"] == \"all_users\":\n for user in self.users:\n print(\"Importing \\'\" + imp_url + \"\\' for user \\'\" + user.username + \"\\'\")\n pj = user.projects.create({'name': project,\n 'user_id': user.id,\n 'access_level': gitlab.OWNER_ACCESS,\n 'import_url': imp_url})\n elif config[\"projects\"][project][\"owner_conf\"][\"owner\"] == \"user\":\n for user in self.users:\n if user.username == config[\"projects\"][project][\"owner_conf\"][\"name\"]:\n print(\"Importing \\'\" + imp_url + \"\\' for user \\'\" + user.username + \"\\'\")\n pj = user.projects.create({'name': project,\n 'user_id': user.id,\n 'Access_level': gitlab.OWNER_ACCESS,\n 'import_url': imp_url})\n elif config[\"projects\"][project][\"owner_conf\"][\"owner\"] == \"group\":\n for group in self.groups:\n if group.name == config[\"projects\"][project][\"owner_conf\"][\"name\"]:\n print(\"Importing \\'\" + imp_url + \"\\' for group \\'\" + group.name + \"\\'\")\n pj = group.projects.create({'name': project,\n 'namespace_id': group.id,\n 'import_url': imp_url})\n else:\n print(\"Project owner Config is wrong, aborting\")\n exit(1)\n # Delete protected Master Branch\n if config[\"projects\"][project][\"protect_master_branch\"] == \"False\":\n print(\"Removing Project master Branch protection\")\n pj.protectedbranches.delete('master')", "def new_app(project_name,app_name ):\n from flask_create_app.core.commands.cmd_newapp import create_new_app\n proj_dir = os.getcwd()\n create_new_app(app_name, proj_dir,project_name)", "def do(args):\n worktree = qisys.parsers.get_worktree(args)\n\n project_name = args.project_name\n project_path = os.path.join(os.getcwd(), project_name)\n\n if os.path.exists(project_path):\n raise Exception(\"%s already exists\" % project_path)\n os.mkdir(project_path)\n copy_helper(project_name, project_path)\n\n if args.git:\n qisys.command.call([\"git\", \"init\"], cwd=project_path)\n with open(os.path.join(project_path, \".gitignore\"), \"w\") as fp:\n fp.write(\"build-*\\n\")\n qisys.command.call([\"git\" , \"add\" , \".\"], cwd=project_path)\n qisys.command.call([\"git\" , \"commit\" , \"-m\" , \"initial commit\"], cwd=project_path)\n\n ui.info(ui.green, \"New project initialized in\", ui.bold, project_path)\n worktree.add_project(project_path)\n return worktree.get_project(project_path)", "def new_project(file_path):\n project_template_dir 
= pkg_resources.resource_filename('camtasia', os.path.join('resources', 'new.cmproj'))\n shutil.copytree(project_template_dir, file_path)", "def push_blog():\n\n\twarn(green(\"Update blog on github pages.\"))\n\t_setup_virtualenv()\n\n\twith cd(PROJECT_PATH):\n\t\twith prefix(env.activate):\n\t\t\tlocal('python blog.py build', shell='/bin/bash')\n\n\t\tlocal('cd {}'.format(FREEZER_DESTINATION), shell='/bin/bash')\n\t\tlocal('git status')\n\t\task_msg = red(\"Force push new content to blog?\")\n\t\tif console.confirm(ask_msg, default=False) is True:\n\t\t\tlocal('git add --all')\n\t\t\tlocal('git commit -m \"new articles\"')\n\t\t\tlocal('git push --force origin master')", "def project_starter(project_name,yaml_project):\n snpt.load_snippets()\n archives = yaml.load(yaml_project)\n make_project_structure(archives,\"./\",project_name)\n make_exec(project_name + '/manage.py')", "def populate_source():\n\n if os.path.exists('conf.py'):\n return\n\n branch = _get_source_branch()\n\n local('git checkout %s' % branch)\n init_site()\n _git_commit_all('Initial commit\\n[skip ci]')\n _git_push(branch)", "def install_django_project(self):\n\n from django.conf import settings as django_settings\n\n with cd(\"{0}\".format(self.app_remote_dir)):\n\n pip(\"install -r requirements.txt\")\n\n with cd(\"{0}\".format(self.app_package)):\n self.setup_settings_local()\n\n self.syncdb(django_settings)\n self.setup_gunicorn_supervisor()", "def reset_project(ctx, path):\n with ctx.cd(path):\n ctx.run(\"rm -rf project.state repos\")\n ctx.run(\"newt -v upgrade\")", "def project():", "def project():", "def project():", "def sync_config():\n rsync_project(remote_dir='/apps/sharejs-rethinkdb-example/config/', local_dir='./config/')", "def _bootstrap():\r\n import os\r\n import sys\r\n \r\n pwd = os.path.dirname(__file__)\r\n \r\n (parent_directory, project_name) = os.path.split(pwd)\r\n \r\n # protect template itself from being bootstrapped\r\n if project_name == 'django_project_template':\r\n abort('bootstrap should not be run on project template!')\r\n\r\n env.project_name = project_name\r\n env.project_domain = env.project_name.split('.')[0].replace('_','-')\r\n \r\n def replace_in_files(path, find, replace):\r\n \r\n import fileinput\r\n \r\n if os.path.isfile(path):\r\n for line in fileinput.input(path, inplace=1):\r\n if find in line:\r\n line = line.replace(find, replace)\r\n sys.stdout.write(line)\r\n \r\n if os.path.isdir(path):\r\n # do not replace in virtual env\r\n if os.path.split(path)[1] == env.virtualenv_dir:\r\n return\r\n for f in os.listdir(path):\r\n replace_in_files(os.path.join(path, f), find, replace)\r\n\r\n # 'escape' placeholders here to protect them from being replaced\r\n replace_in_files(pwd, '@PROJECT_NAME' + '@', env.project_name)\r\n replace_in_files(pwd, '@PROJECT_DOMAIN' + '@', env.project_domain)", "def git_post_install(projects_yaml):\n http_proxy = git_yaml_value(projects_yaml, 'http_proxy')\n if http_proxy:\n pip_install('mysql-python', proxy=http_proxy,\n venv=git_pip_venv_dir(projects_yaml))\n else:\n pip_install('mysql-python',\n venv=git_pip_venv_dir(projects_yaml))\n\n src_etc = os.path.join(git_src_dir(projects_yaml, 'keystone'), 'etc')\n configs = {\n 'src': src_etc,\n 'dest': '/etc/keystone',\n }\n\n if os.path.exists(configs['dest']):\n shutil.rmtree(configs['dest'])\n shutil.copytree(configs['src'], configs['dest'])\n\n # NOTE(coreycb): Need to find better solution than bin symlinks.\n symlinks = [\n {'src': os.path.join(git_pip_venv_dir(projects_yaml),\n 
'bin/keystone-manage'),\n 'link': '/usr/local/bin/keystone-manage'},\n ]\n\n for s in symlinks:\n if os.path.lexists(s['link']):\n os.remove(s['link'])\n os.symlink(s['src'], s['link'])\n\n render('git/logging.conf', '/etc/keystone/logging.conf', {}, perms=0o644)\n\n bin_dir = os.path.join(git_pip_venv_dir(projects_yaml), 'bin')\n # The charm runs the keystone API under apache2 for openstack liberty\n # onward. Prior to liberty upstart is used.\n if CompareOpenStackReleases(os_release('keystone')) < 'liberty':\n keystone_context = {\n 'service_description': 'Keystone API server',\n 'service_name': 'Keystone',\n 'user_name': 'keystone',\n 'start_dir': '/var/lib/keystone',\n 'process_name': 'keystone',\n 'executable_name': os.path.join(bin_dir, 'keystone-all'),\n 'config_files': ['/etc/keystone/keystone.conf'],\n 'log_file': '/var/log/keystone/keystone.log',\n }\n\n templates_dir = 'hooks/charmhelpers/contrib/openstack/templates'\n templates_dir = os.path.join(charm_dir(), templates_dir)\n render('git.upstart', '/etc/init/keystone.conf', keystone_context,\n perms=0o644, templates_dir=templates_dir)\n\n # Don't restart if the unit is supposed to be paused.\n if not is_unit_paused_set():\n service_restart(keystone_service())", "def new_repo(req, source, psp_dir, url_helper=None):\n req.content_type = 'text/html'\n repo_dir = req.filename.rsplit('/', 1)[0]\n files = [f for f in os.listdir(repo_dir) if f[-3:] == '.h5']\n top_level = psp.PSP(req, filename=psp_dir+'new_repo.psp')\n top_level.run({'context': req.uri,\n 'files': files})", "def newproject_view(request):\n\n # Use to tell to the template that the user want to creat a new project\n is_new = True\n\n # Get all the user. Everyone may be member of the project\n users = User.objects.all()\n\n # If the view received data, try to creat a project\n if request.method == \"POST\":\n form = ProjectForm(request.user, request.POST)\n if form.is_valid():\n # Save the new project in the database\n form.save(commit=True)\n\n # redirect to the project list display page\n return redirect(\"projects\")\n else:\n # creat an empty form for the template\n form = ProjectForm(request.user)\n\n return render(request, 'newProject.html', locals())", "def create_new_project():\n readline.parse_and_bind('tab: complete')\n\n print \\\n\"\"\"\n xbmcswift2 - A micro-framework for creating XBMC plugins.\n [email protected]\n --\n\"\"\"\n print 'I\\'m going to ask you a few questions to get this project' \\\n ' started.'\n\n # noinspection PyDictCreation\n opts = {}\n\n # Plugin Name\n opts['plugin_name'] = get_valid_value(\n 'What is your plugin name?',\n validate_nonblank\n )\n\n # Plugin ID\n opts['plugin_id'] = get_valid_value(\n 'Enter your plugin id.',\n validate_pluginid,\n 'plugin.video.%s' % (opts['plugin_name'].lower().replace(' ', ''))\n )\n\n # Parent Directory\n opts['parent_dir'] = get_valid_value(\n 'Enter parent folder (where to create project)',\n validate_isfolder,\n getcwd()\n )\n opts['plugin_dir'] = os.path.join(opts['parent_dir'], opts['plugin_id'])\n assert not os.path.isdir(opts['plugin_dir']), \\\n 'A folder named %s already exists in %s.' 
% (opts['plugin_id'],\n opts['parent_dir'])\n\n # Provider\n opts['provider_name'] = get_valid_value(\n 'Enter provider name',\n validate_nonblank,\n )\n\n # Create the project folder by copying over skel\n copytree(SKEL, opts['plugin_dir'], ignore=ignore_patterns('*.pyc'))\n\n # Walk through all the new files and fill in with out options\n for root, dirs, files in os.walk(opts['plugin_dir']):\n for filename in files:\n update_file(os.path.join(root, filename), opts)\n\n print 'Projects successfully created in %s.' % opts['plugin_dir']\n print 'Done.'", "def put_settings_files(env='development'):\n projects = build_projects_vars()\n project = projects[env]\n if exists('%(dir)s/%(inner_dir)s' % project):\n put(project['settings_path'], '%(dir)s/%(inner_dir)s/local_settings.py' % project)\n if env == 'production':\n with cd('%(dir)s/%(inner_dir)s' % project):\n sed('local_settings.py', '^DEBUG = True$', 'DEBUG = False')", "def new_project(self, rootdir=None):\n if rootdir is None:\n rootdir = Ui.instance().select_directory(user.home)\n if not os.path.exists(rootdir):\n os.makedirs(rootdir)\n\n print 'Weld.new_project in ', rootdir\n project = Project(rootdir)\n\n project.save()\n self.project = project\n self.current_project_path = rootdir\n Ui.instance().set_resources_draggable(True)\n Ui.instance().show_status('new project created')", "def create_project(opts):\n if opts['django']:\n structure.create_django_proj(opts)\n if opts['cookiecutter_template']:\n structure.create_cookiecutter(opts)\n proj_struct = structure.make_structure(opts)\n structure.create_structure(proj_struct,\n update=opts['update'] or opts['force'])\n if not opts['update'] and not repo.is_git_repo(opts['project']):\n repo.init_commit_repo(opts['project'], proj_struct)", "def repo_new(request):\n if request.method != 'POST':\n form = RepoForm()\n return respond(request, 'repo_new.html', {'form': form})\n form = RepoForm(request.POST)\n errors = form.errors\n if not errors:\n try:\n repo = models.Repository(\n name=form.cleaned_data.get('name'),\n url=form.cleaned_data.get('url'),\n guid=form.cleaned_data.get('guid'),\n )\n except (db.BadValueError, ValueError) as err:\n errors['__all__'] = unicode(err)\n if errors:\n return respond(request, 'repo_new.html', {'form': form})\n repo.put()\n branch_url = repo.url\n if not branch_url.endswith('/'):\n branch_url += '/'\n branch_url += 'trunk/'\n branch = models.Branch(repo_key=repo.key, repo_name=repo.name,\n category='*trunk*', name='Trunk',\n url=branch_url)\n branch.put()\n return HttpResponseRedirect(reverse(repos))", "def create_base_projects_folder():\n if '.wcscanner' not in os.listdir(context.__BASE_PATH__):\n os.mkdir(context.__PROJECTS_PATH__, mode=0o777)\n log.info(\"Base folder '.wcscanner' created in %s\", context.__BASE_PATH__)\n else:\n log.info(\"Base folder '.wcscanner' already in %s\", context.__BASE_PATH__)", "def setup_project():\n _require_environment()\n\n # Checks if needed conf files for this environment already exist\n if not os.path.exists(_interpolate('%(settings)s.py')):\n abort(_interpolate('There is no settings.py for %(environment)s - create one, and commit'))\n if not os.path.exists(_interpolate('config/apache_%(environment)s.conf')):\n abort(_interpolate('There is no Apache conf for %(environment)s - use task \"generate_apache_conf\" to generate one, and commit'))\n if not os.path.exists(_interpolate('config/wsgi_%(environment)s.py')):\n abort(_interpolate('There is no WSGI conf for %(environment)s - use task \"generate_wsgi_conf\" to generate 
one, and commit'))\n\n # Configures virtualenv and clones git repo\n _setup_virtualenv()\n _clone_gitrepo()\n\n # Issues extra commands at project's level, if any\n extra_commands()\n\n # Sets up Apache, MySQL\n _setup_project_apache()\n _drop_database_mysql()\n _setup_project_mysql()\n\n # Finish installation\n pip_install()\n update_project()", "def update_site(env='development', update_settings='n', upgrade_apps='n'):\n update_project(env, update_settings)\n update_apps(env, upgrade_apps)", "def init(self):\n # Create the default project files\n self.create_from_templates()\n\n # Add all the newly created files to the git staging area\n self.add_all_untracked()\n\n # Check that a compatible version of Python is available; install it if not\n self._pyenv.ensure_python(self.get_python_version())\n\n # Create virtualenv\n self._pyenv.create_virtualenv(self.name, self.get_python_version())", "def bootstrap():\n create_virtualenv()\n install_init_script()\n if not files.exists(env.code_root):\n clone_all()\n #deploy_from_local()\n pull_and_checkout_all()\n update_requirements()\n print '\\nNow add your database password to localsettings.py and run syncdb'", "def _init_remote():\r\n require('path', provided_by = [staging])\r\n\r\n create_project_dir()\r\n deploy_nosyncdb()\r\n create_virtualenv()\r\n install_requirements()\r\n create_db()\r\n create_secret_settings()\r\n syncdb()\r\n createsuperuser()\r\n install_site()\r\n reload()", "def updateProjects(request):\n\n updater = ProjectUpdater()\n updater.run()\n return http.HttpResponse(\"Ok\")", "def _prep_client_dist_for_project(project_env, project_root_dir):\n #need to make the project's index.html the index.html that tomcat will find\n clientdir = os.path.join(project_root_dir, 'client')\n index_target_fn = os.path.join(clientdir, 'index.html')\n\n index_src_base = 'index.' + project_env.get_project_name() + '.html'\n index_src_fn = os.path.join(clientdir, index_src_base)\n cmd = 'cp ' + index_src_fn + ' ' + index_target_fn\n cr = container_users.make_host_user_command_runner()\n result = cr.run(cmd)\n return result.get_exit_code()", "def deploy(env='development', update_settings='n', upgrade_apps='n'):\n update_site(env, update_settings, upgrade_apps)\n restart_site(env)", "def build():\n local('python manage.py build \\\n --skip-static --settings={{ project_name }}.settings.production')\n\n # hack to move whole directory over to build\n local('cd {} && mv static/* build/'.format(settings.BASE_DIR))", "def create(ctx, template_name, website_name):\n try:\n # Check if the destination directory already exists\n path = os.path.join(ctx.obj['BASEDIR'], website_name)\n if os.path.exists(path):\n answer = input('Do you want to delete the existing directory? 
[Y] ')\n if answer.lower() == 'y' or answer == '':\n shutil.rmtree(path)\n\n # Generate github repo string\n github_name = template_name\n if '/' not in template_name:\n github_name = 'docker-hosting/%s-template' % template_name\n \n # Try to download repository\n link = 'https://github.com/%s/archive/master.zip' % github_name\n urlretrieve(link, 'master.zip')\n\n # Unzip downloaded file to destination directory\n zip_ref = zipfile.ZipFile('master.zip', 'r')\n zip_ref.extractall(path)\n zip_ref.close()\n\n # The destination folder contains another folder named [github-repo-name]-master.\n # We need to move all files within this directory and delete it afterwards.\n repo_name = github_name.split('/')[1]\n master_dir = os.path.join(path, repo_name + '-master')\n for file in os.listdir(master_dir):\n shutil.move(os.path.join(master_dir, file), path)\n os.rmdir(os.path.join(path, repo_name + '-master'))\n\n # Now remove the file master.zip\n os.remove('master.zip')\n except PermissionError as e:\n # TODO: handle and log exceptions\n print('%s\\n%s' % (e, 'Note: Try to running this program as Administrator.'))\n except Exception as e:\n # TODO: handle and log exceptions\n print(e)", "def djangofy(project):\n click.echo(click.style('Making Vue.js {project} into django app'.format(project=project), bg='blue', fg='white'))\n urls_py = URLS_TEMPLATE.format(project=project)\n try:\n os.makedirs('{project}/templates/{project}/'.format(project=project))\n except OSError:\n click.echo(click.style('Command already executed', fg='red'))\n sys.exit(0)\n with cd(project):\n touch('__init__.py')\n touch('index.html', 'templates/{project}/'.format(project=project))\n with open('urls.py', 'w') as f:\n f.write(urls_py)\n with open('package.json', 'r+') as f:\n pakckage_json = json.loads(''.join(f.readlines()), object_pairs_hook=OrderedDict)\n pakckage_json['scripts']['build'] += ' && pyvue djbuild {project}'.format(project=project)\n f.seek(0)\n f.write(json.dumps(pakckage_json, indent=2))\n with cd('config'):\n with open('index.js', 'r+') as f:\n lines = f.readlines()\n f.seek(0)\n for line in lines:\n f.write(line\n .replace('../dist/index.html', '../templates/{project}/index.html'.format(project=project))\n .replace('../dist', '../static')\n .replace(\"assetsSubDirectory: 'static'\",\n \"assetsSubDirectory: '{project}'\".format(project=project)))\n\n click.echo(click.style('Enjoy!', fg='green'))", "def git_install(projects_yaml):\n if git_install_requested():\n git_pre_install()\n projects_yaml = git_default_repos(projects_yaml)\n git_clone_and_install(projects_yaml, core_project='keystone')\n git_post_install(projects_yaml)", "def upgrade_project(ctx, path):\n with ctx.cd(path):\n ctx.run(\"newt upgrade\")", "def post_process(self, **kwargs):\n self.create_ignore()\n click.echo('Create project {} successfully. 
Enjoy yourself!'.format(self.app_dir))", "def newrepo():\n form = AddRepoForm()\n if form.validate_on_submit():\n\n # make the directory for this package\n os.mkdir(DATA + form.name.data)\n\n flash('Repo created successfully')\n\n # redirect to the login page\n return redirect(url_for('home.dashboard'))\n\n # load registration template\n return render_template('home/add.html', form=form, title='Local Repo', target=\"add\")", "def deploy():\n with cd(env.REMOTE_CODEBASE_PATH):\n run(\"git pull\")\n run(\"go build -o app\")\n sudo(\"supervisorctl reload\")", "def clone():\n with cd(os.path.dirname(env.proj_root.rstrip('/'))):\n run('git clone --recursive %s' % (git_repo,))", "def clone_github_repo(self):\n repository_local_destination = os.path.join(MODULES_PATH, 'github', self.username, self.repository_name)\n if not os.path.exists(repository_local_destination):\n Repo.clone_from(self.repo_url, repository_local_destination, branch='master')\n init_filename = os.path.join(repository_local_destination, '__init__.py')\n open(init_filename, 'a').close()", "def clone_repo():\n with settings(warn_only=True):\n run('git clone %(repository_url)s %(repo_path)s' % env)", "def __gitEditRepoConfig(self):\n self.vcs.gitEditConfig(self.project.getProjectPath())", "def create_bare_repo(self, domain):\n\n domain_dir = self.get_domaindir(domain)\n www_dir = domain_dir + \"/www\"\n www_git = domain_dir + \"/www.git\"\n hook_post_receive_file = www_git + \"/hooks/post-receive\"\n\n if not os.path.exists(www_git):\n os.makedirs(www_git)\n git_init_command = \"cd \" + www_git\n git_init_command += \" && git init --bare\"\n subprocess.call(git_init_command, shell=True)\n\n if not os.path.isfile(hook_post_receive_file):\n with open(hook_post_receive_file, \"w\") as file:\n post_receive_content = \"#!/bin/sh\"\n post_receive_content += \"\\nGIT_WORK_TREE=\" + www_dir\n post_receive_content += \" git checkout -f\"\n file.write(post_receive_content)\n subprocess.call(\"chmod +x \" + hook_post_receive_file, shell=True)", "def __init__(self, repo, website, host='0.0.0.0', port='5252',\r\n home=os.getcwd(), new_website=False, create_admin=False, **kwargs):\r\n if repo and website:\r\n super().__init__(repo=repo, home=home, **kwargs)\r\n\r\n # Website Deployment Information:\r\n self.website = website\r\n self.web_host = host\r\n self.web_port = port\r\n # Path to Flask's Web-Server Files\r\n self.website_path = self.flask / Path(self.website)\r\n\r\n self.Kitchen = Oven(repo=self.repo, user=self.user,\r\n website=self.website, output_dir=self.flask)\r\n logmsg = 'The Website Management class variables have been set.'\r\n self.managementlog.info(logmsg)\r\n\r\n if new_website is True:\r\n logmsg = 'The website cookie is being prepared for the Oven.'\r\n self.managementlog.info(logmsg)\r\n self.Kitchen.bake_the_website(host=self.web_host,\r\n port=self.web_port,\r\n website_path=self.website_path)", "def update_conf_file():\n filepath = remote_dir + \"/apache2/conf/httpd.conf\"\n fabric.contrib.files.sed(filepath, 'myproject', project_name)", "def test_replace_project(self):\n pass", "def deploy():\n build()\n copy()\n install()", "def deploy_pagebrowser(where=None, restart=True):\n config = get_config(where)\n with settings(host_string=config['host_string']), cd(config['installation_dir']):\n # run('cd src/INGSearch && git pull')\n run('cd src/INGBookDasa && git checkout master')\n run('cd src/INGBookDasa && git pull')\n if restart:\n run('bin/circusctl restart pagebrowser')", "def project_clone_view(user_data, 
cache):\n return ProjectCloneCtrl(cache, user_data, dict(request.json)).to_response()", "def sync_git_repo():\n # get the current dir of this script\n current_dir = os.path.dirname(os.path.realpath(sys.argv[0]))\n repo_path = os.path.join(current_dir,REPO_NAME)\n logging.info(\"Repository path is: \"+repo_path)\n # check to see if a repo has been init already\n try: \n repo = git.Repo(repo_path)\n logging.info(\"Git repo has already been created.\")\n except (git.exc.InvalidGitRepositoryError,git.exc.NoSuchPathError):\n logging.info(\"No git repo has been initialized for this module. Cloning from github.com now.\")\n repo_url = \"https://\"+REPO_USERNAME+\":\"+REPO_PERSONAL_ACCESS_TOKEN+\"@github.com/\"+REPO_USERNAME+\"/\"+REPO_NAME+\".git\"\n git.Repo.clone_from(repo_url,repo_path)\n logging.info(\"Repo cloned successfully.\")\n repo = git.Repo(repo_path)\n # now we have a valid repo created \n # pull the latest data from the repo\n origin = repo.remotes.origin\n origin.pull()\n # create the csv output dir if it does not exist\n Path(paho_csv_reports_dir).mkdir(parents=False, exist_ok=True)\n # get all csv files in this dir\n all_paho_csv_files = glob.glob(paho_csv_reports_dir+os.path.sep+\"*.csv\")\n # add all files in this dir to the repo index\n repo.index.add(all_paho_csv_files)\n logging.info(\"Added all .csv files from \"+paho_csv_reports_dir+\" to repo index.\")\n # set the commit message\n repo.index.commit(\"Automatic commit by \"+os.path.basename(__file__))\n # git push \n origin.push()\n logging.info(\"All csv files pushed to github repo successfully.\")", "def cloneDB():\n print(\"::cloning db\")\n filepath = confighome+\"config\"\n\n # open config to get credentials for ssh \n with open(filepath,mode='r', encoding='utf-8') as f:\n jconfig = json.load(f)\n creds=jconfig[0]\n\n # locally clone the \"db\"\n cmd_full=\"git clone \"+creds['db']['username']+\"@\"+creds['db']['host']+\":swrss_database\"\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)", "def deploy(config, args):\n log = logging.getLogger('kraftwerk.deploy')\n \n # TODO better way to detect new, or maybe move to dedicated command\n stdout, stderr = args.node.ssh('stat /var/service/%s' % args.project.name, pipe=True)\n new = bool(stderr) or args.override\n \n # Sync codebase over with the web user\n destination = 'web@%s:/web/%s/' % (args.node.hostname, args.project.name)\n stdout, stderr = args.project.rsync(destination)\n if stderr:\n log.error(\"Sync error: %s\" % stderr)\n sys.exit(stderr)\n \n # Copy requirements\n args.project.copy(args.node, 'requirements.txt')\n \n # Put together the setup script\n cmd = config.template(\"scripts/project_setup.sh\", \n project=args.project, new=new, \n upgrade_packages=args.upgrade_packages)\n stdout, stderr = args.node.ssh(cmd, pipe=True)\n if stderr:\n print stderr\n \n # TODO detect new services\n if not args.no_service_setup and new:\n for service in args.project.services():\n args.node.ssh(service.setup_script)\n \n print u\"%s live at %r\" % (args.project.canonical_domain(), args.node.hostname)", "def sync(args: argparse.Namespace) -> None:\n\tdel args\n\trepo_path = _find_repo()\n\tmanifest_file = os.path.join(repo_path, MANIFEST_DIRECTORY, storas.manifest.DEFAULT_MANIFEST_FILE)\n\tmanifest = storas.manifest.load(manifest_file)\n\tfor project in manifest.projects:\n\t\tfull_path = os.path.join(repo_path, \"..\", project.path)\n\t\tremote = 
project.remote\n\t\tfull_fetch_url = urllib.parse.urljoin(remote.fetch_host, project.name)\n\t\tif not os.path.exists(full_path):\n\t\t\tos.makedirs(full_path, exist_ok=True)\n\t\t\tLOGGER.debug(\"Created '%s'\", full_path)\n\t\t\t_run_git([\"clone\", \"-b\", project.revision, full_fetch_url], cwd=full_path)", "def bootstrap(tag, settings='production'):\n deploy_dir = \"%s%s/example\" % (env.site_root, tag)\n virtualenv = \"%s/virtualenv\" % deploy_dir\n\n # Bootstrap the code\n with cd(deploy_dir):\n run(\"python bootstrap.py\")\n with prefix('source %s/bin/activate' % virtualenv):\n run(\"./manage.py collectstatic --noinput --verbosity 0\")\n run(\"./manage.py syncdb\")\n # run(\"./manage.py migrate --delete-ghost-migrations\")", "def cmd_apps__create(args):\n \n if args.name is None:\n args.name = os.path.basename(os.getcwd())\n\n url = remote.create_project(args.name)\n \n if in_git_repo():\n if get_push_url('tinyserv') is None:\n git(None, 'remote', 'add', 'tinyserv', url)\n print \"Added remote 'tinyserv'.\"\n else:\n print \"This repository is already configured for app '%s'.\" % \\\n _get_current_project_name()\n \n print \"Remote repository URL is %s.\" % url", "def install(self, project, acl=None):\n self.config.options['project_name'] = project.name\n self.config.options['show_right_bar'] = True\n super(ForgeWikiApp, self).install(project, acl=acl)\n\n root_page_name = self.default_root_page_name\n Globals(app_config_id=c.app.config._id, root=root_page_name)\n self.upsert_root(root_page_name)", "def create_project(project_name, default, helloworld, api, spa):\n # getting arguments and options from the locals() function\n options = locals()\n # project_name is removed since we want to browse through options and project_name isn't necessary\n options.pop('project_name')\n\n # if none of the options was selected, fall back to default\n if [i for i in options.values()].count(True) == 0:\n options['default'] = True\n\n # seeing if there are more than 2 options selected\n elif [i for i in options.values()].count(True) > 1:\n error_exit(\"Please make sure only 1 option is selected and try again.\")\n\n # seeing if project_name matches any of directories in the current directory\n try:\n create_folder(project_name)\n\n except FileExistsError:\n error_exit(\n 'That directory already exists. Please check your project name and try again.')\n\n # printing when project creation is starting\n click.echo(NEWLINE + 'Creating a new Flask app in ' +\n colored(f'~/{project_name}', 'green') + '.')\n click.echo(NEWLINE)\n\n # create venv if helloworld option is not selected\n if not helloworld:\n create_venv(f'./{project_name}/venv/')\n\n # deciding which boilerplate to choose and creating it based on argument choice\n base_dir = os.path.dirname(__file__)\n\n # iterating over names and values in options dictionary\n for name, value in options.items():\n if value:\n choice = os.path.join(base_dir, name)\n # copy the boilerplate filetree to the project folder\n try:\n copy_filetree(choice, f\"./{project_name}/\")\n except Exception as e:\n error_exit(e)\n\n # output hell starts here\n click.echo(\n f'Success! 
Created app {project_name} in {os.getcwd()}'+f'/{project_name}')\n click.echo('Inside that directory you can run several commands:')\n click.echo(NEWLINE)\n\n # print commands and descriptions\n print_command('python run.py',\n 'Starts the server, default config is set to development.')\n if not helloworld:\n\n print_command('export secret_key=STRING',\n 'Sets the secret key for your app.')\n\n print_command('export PRODUCTION=True',\n 'Sets production config for your app. Setting it to False will set the development config.')\n\n print_command('source venv/bin/activate (unix) \\n\\t./venv/Scripts/activate (windows)',\n 'Activate the virtual enviroment for the app.')\n\n print_command('pip install -r requirements.txt',\n 'Install the packages listed in requirements.txt into the venv.')\n\n click.echo('We suggest that you start by typing:')\n click.echo(colored('\\tcd ', 'cyan') + colored(project_name, 'white'))\n click.echo(colored('\\tsource venv/bin/activate' if not system()\n == 'Windows' else '\\t./venv/Scripts/activate', 'cyan'))\n click.echo(colored('\\tpip install -r ', 'cyan') +\n colored('requirements.txt', 'white'))\n click.echo(colored('\\tpython run.py', 'cyan'))\n else:\n click.echo('We suggest that you start by typing:')\n click.echo(colored('\\tcd ', 'cyan') + colored(project_name, 'white'))\n click.echo(colored('\\tpip install flask ', 'cyan'))\n click.echo(colored('\\tpython app.py'))\n\n click.echo(NEWLINE + 'Happy hacking!')", "def backup(ctx, project, origin, force):\n\n if not check_main_conf(ctx):\n return\n\n if origin is not None and project is None:\n click.echo(\"--project option is required when --origin is set.\")\n return\n\n bkp = ctx.obj[\"bkp\"]\n\n if not os.path.exists(ctx.obj[\"PROJECTS_DIR\"]):\n click.echo(\"Projects directory doesn't exists at %s\" % ctx.obj[\"PROJECTS_DIR\"])\n return\n\n if project is not None:\n bkp.project_load(project_name=project)\n bkp.backup(origin=origin, force=force)\n else:\n for file in os.listdir(ctx.obj[\"PROJECTS_DIR\"]):\n if file.endswith(\".conf\"):\n project_name = file.replace(\".conf\", \"\")\n bkp.project_load(project_name=project_name)\n bkp.backup(origin=origin, force=force)", "def create_gitignore(project_name):\n\twith io.FileIO(\".gitignore\", \"w\") as file:\n\t\tfile.write(\"# Directories #\\n###################\\nbin/\\n\\n\"\n\t\t\t\t\"# OS generated files #\\n###################\\n\"\n\t\t\t\t\".DS_Store\\n._*\\n.nfs*\\n\\n\"\n\t\t\t\t\"# Compiled source #\\n###################\\n\"\n\t\t\t\t\"a.out\\n*.o\")\n\tshutil.move('.gitignore', project_name)\n\tprint \"Created project .gitignore file.\"" ]
[ "0.6597053", "0.6376039", "0.6335648", "0.61842006", "0.6173729", "0.6145058", "0.6005152", "0.59648216", "0.595788", "0.59347206", "0.5933034", "0.5923805", "0.59103745", "0.58390486", "0.5812732", "0.57964295", "0.572337", "0.56827134", "0.5680887", "0.5663389", "0.56439286", "0.5631816", "0.5591635", "0.5576353", "0.55736345", "0.5573325", "0.55700004", "0.5548547", "0.5543566", "0.55360085", "0.5527349", "0.55083567", "0.54899114", "0.54898924", "0.5466962", "0.5465481", "0.5454233", "0.54517424", "0.5409576", "0.5399829", "0.5372394", "0.537193", "0.53716725", "0.53344643", "0.53319204", "0.5325535", "0.53178877", "0.53082263", "0.53049374", "0.5303556", "0.5303556", "0.5303556", "0.5284039", "0.5273906", "0.5269484", "0.526764", "0.52634555", "0.5244718", "0.5244559", "0.5242046", "0.52370405", "0.5232682", "0.52269775", "0.5221019", "0.52205074", "0.5179021", "0.517735", "0.51746774", "0.516894", "0.51611793", "0.5150878", "0.5148608", "0.51469916", "0.5132243", "0.51277995", "0.5125293", "0.51139766", "0.5109168", "0.51063436", "0.510416", "0.5100524", "0.5091415", "0.50828093", "0.507666", "0.50711113", "0.50707304", "0.50687814", "0.505882", "0.5049555", "0.50409067", "0.5027663", "0.5013189", "0.50124836", "0.5007951", "0.5006497", "0.50036025", "0.5002756", "0.49950516", "0.4994089", "0.4993342" ]
0.7331329
0
Adds the "/static" and "/media" directories to the static webapp if needed, and deletes the default index.html. Also adds a project/project/static directory if there isn't one.
def add_dirs_to_static(static_webapp_name):
    static_dir = '$HOME/webapps/%s' % static_webapp_name
    with settings(warn_only=True):
        with cd(static_dir):
            run("mkdir static && mkdir media")
            run("rm index.html")
            run("touch index.html")
        with cd(code_dir):
            run("mkdir %s/static" % project_name)
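A minimal invocation sketch for the function above, assuming Fabric 1.x and hypothetical values for the code_dir and project_name globals it reads but does not define:

# Minimal sketch, assuming Fabric 1.x. code_dir and project_name are
# module-level fabfile globals read by add_dirs_to_static; the values
# below are hypothetical placeholders.
from fabric.api import cd, env, run, settings

env.hosts = ['user@host']                        # hypothetical deploy host
code_dir = '$HOME/webapps/django_app/myproject'  # hypothetical project path
project_name = 'myproject'                       # hypothetical project name

# Shell invocation (Fabric 1.x task-argument syntax):
#   fab add_dirs_to_static:static_webapp_name=myproject_static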
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def include_static_files(app):\n file_path = sphinx_prolog.get_static_path(STATIC_FILE)\n if file_path not in app.config.html_static_path:\n app.config.html_static_path.append(file_path)", "def ensure_static_exists():\n for entry in html_static_path:\n static_path = os.path.join(__repo_docs__, entry)\n if not os.path.isdir(static_path):\n os.makedirs(static_path)", "def copy_static_resources(self):\n if not hasattr(settings, 'STATIC_ROOT'):\n raise MissingStaticRoot()\n destination = os.path.join(STORAGE_PATH, 'static')\n if os.path.exists(destination):\n shutil.rmtree(destination)\n shutil.copytree(settings.STATIC_ROOT, destination)", "def add_static_paths(app):\n app.env.book_theme_resources_changed = False\n\n output_static_folder = Path(app.outdir) / \"_static\"\n theme_static_files = resources.contents(theme_static)\n\n if (\n app.config.html_theme_options.get(\"theme_dev_mode\", False)\n and output_static_folder.exists()\n ):\n # during development, the JS/CSS may change, if this is the case,\n # we want to remove the old files and ensure that the new files are loaded\n for path in output_static_folder.glob(\"sphinx-book-theme*\"):\n if path.name not in theme_static_files:\n app.env.book_theme_resources_changed = True\n path.unlink()\n # note sphinx treats theme css different to regular css\n # (it is specified in theme.conf), so we don't directly use app.add_css_file\n for fname in resources.contents(theme_static):\n if fname.endswith(\".css\"):\n if not (output_static_folder / fname).exists():\n (output_static_folder / fname).write_bytes(\n resources.read_binary(theme_static, fname)\n )\n app.env.book_theme_resources_changed = True\n\n # add javascript\n for fname in resources.contents(theme_static):\n if fname.endswith(\".js\"):\n app.add_js_file(fname)", "def deploy_static(): \n from fabdeploy.django import collectstatic as django_collectstatic\n# run(\"rm -rf %(root_path)s%(project_name)s/static/*\" % env) # call again git_add_commit_pull\n django_collectstatic()", "def serve_static_files(request, path, insecure=False, **kwargs):\n\n if not settings.DEBUG and not insecure:\n raise Http404\n normalized_path = posixpath.normpath(unquote(path)).lstrip('/')\n absolute_path = finders.find(normalized_path)\n if not absolute_path:\n if path.endswith('/') or path == '':\n raise Http404(\"Directory indexes are not allowed here.\")\n raise Http404(\"'%s' could not be found\" % path)\n document_root, path = os.path.split(absolute_path)\n return static.serve(request, path, document_root=document_root, **kwargs)", "def index():\n return flask.send_from_directory(\"static\", \"index.html\")", "def path_static():\n return os.path.abspath(os.path.dirname(__file__))+'/_static'", "def static_text_files():\n return send_from_directory(\"static/\", request.path[1:])", "def install_project_structure():\n from .project import static_base, use_static\n\n with sudo():\n info('Install application directory structure')\n\n create_app_root()\n\n if use_static():\n # Create static web paths\n static_path = os.path.join(static_base(), 'static')\n media_path = os.path.join(static_base(), 'media')\n debian.mkdir(static_path, group='www-data', mode=1775)\n debian.mkdir(media_path, group='www-data', mode=1775)", "def serve_static(request, path, document_root):\n # Clean up given path to only allow serving files below document_root.\n path = posixpath.normpath(urllib.unquote(path))\n path = path.lstrip('/')\n newpath = ''\n for part in path.split('/'):\n if not part:\n # Strip empty path components.\n 
continue\n drive, part = os.path.splitdrive(part)\n head, part = os.path.split(part)\n if part in (os.curdir, os.pardir):\n # Strip '.' and '..' in path.\n continue\n newpath = os.path.join(newpath, part).replace('\\\\', '/')\n if newpath and path != newpath:\n return HttpResponseRedirect(newpath)\n fullpath = os.path.join(document_root, newpath)\n if os.path.isdir(fullpath):\n #if show_indexes:\n # return directory_index(newpath, fullpath)\n raise Http404, \"Directory indexes are not allowed here.\"\n if not os.path.exists(fullpath):\n raise Http404, '\"%s\" does not exist' % fullpath\n # Respect the If-Modified-Since header.\n statobj = os.stat(fullpath)\n if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),\n statobj[stat.ST_MTIME], statobj[stat.ST_SIZE]):\n return HttpResponseNotModified()\n mimetype = mimetypes.guess_type(fullpath)[0] or 'application/octet-stream'\n # Treat the file as a django template\n template = Template(open(fullpath, 'rb').read())\n context = RequestContext(request)\n # Render the template giving the current request\n contents = template.render(context)\n response = HttpResponse(contents, mimetype=mimetype)\n response[\"Last-Modified\"] = http_date(statobj[stat.ST_MTIME])\n response[\"Content-Length\"] = len(contents)\n return response", "def setup_output_path(self):\n self.logger.info('setting up output path')\n try:\n self.output_path.mkdir()\n except FileExistsError:\n pass\n try:\n (self.output_path / 'simple').mkdir()\n except FileExistsError:\n pass\n for filename in resource_listdir(__name__, 'static'):\n if filename == 'index.html':\n # Skip template\n continue\n with (self.output_path / filename).open('wb') as f:\n source = resource_stream(__name__, 'static/' + filename)\n f.write(source.read())\n source.close()", "def static(path):\n return static_file(path, root='media')", "def glr_path_static():\n return os.path.join(base_path, \"static\")", "def deploy_static_media(env=None, asset_version='', quick=False, haus_vars={}):\n print green('Deploying static media {}'.format('__quick__' if quick else ''))\n collectstatic(no_input=True, skip_admin=quick)", "def make_static_tween(app, handler):\n # TODO allow turning off.\n # TODO Get path from config.\n static_app = DirectoryApp(\n os.path.join(os.path.dirname(__file__), app.static_path),\n index_page=None\n )\n\n def static_tween(request):\n if request.path_info_peek() == 'static':\n request.path_info_pop()\n return static_app(request)\n return handler(request)\n\n return static_tween", "def collect_static_files():\n with env.cd(settings.PROJECT_PATH), prefix(COMMANDS['set_environment']), \\\n prefix(COMMANDS['activate_virtualenv']):\n env.run('python rnacentral/manage.py collectstatic --noinput')", "def add(app, url = None, path = None, endpoint=None, index='index.html'):\n url = url or app.static_url_path or ''\n path = os.path.abspath(path or app.static_folder or '.')\n endpoint = endpoint or 'static_' + os.path.basename(path)\n\n if path == app.static_folder:\n if url != app.static_url_path:\n raise ValueError('Files in `{}` path are automatically served on `{}` URL by Flask.'\n ' Use different path for serving them at `{}` URL'.format(path, app.static_url_path, url))\n else:\n @app.route(url + '/<path:filename>', endpoint = endpoint)\n def static_files(filename):\n return send_from_directory(path, filename)\n\n if index:\n @app.route(url + '/', endpoint = endpoint + '_index')\n def static_index():\n return send_from_directory(path, index)\n\n if url:\n @app.route(url, endpoint = 
endpoint + '_index_bare')\n def static_index_bare():\n return send_from_directory(path, index)", "def test_does_static_directory_exist(self):\n does_static_dir_exist = os.path.isdir(self.static_dir)\n does_css_static_dir_exist = os.path.isdir(os.path.join(self.static_dir, 'css'))\n does_js_static_dir_exist = os.path.isdir(os.path.join(self.static_dir, 'js'))\n \n self.assertTrue(does_static_dir_exist, f\"{FAILURE_HEADER}The static directory was not found in the expected location. Check and try again.{FAILURE_FOOTER}\")\n self.assertTrue(does_css_static_dir_exist, f\"{FAILURE_HEADER}The css subdirectory was not found in your static directory.{FAILURE_FOOTER}\")\n self.assertTrue(does_js_static_dir_exist, f\"{FAILURE_HEADER}The js subdirectory was not found in your static directory.{FAILURE_FOOTER}\")", "def __get_server_static__(app_path,static_dir):\n import os\n # from . import config_loader\n\n # root_path = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\n _path = (static_dir).replace(\"/\", os.path.sep)\n return os.sep.join([app_path, _path])", "def static(path):\n return bottle.static_file(path, root='static/')", "def static(path):\n return bottle.static_file(path, root='static/')", "def static(path):\n return bottle.static_file(path, root='static/')", "def static(path):\n return bottle.static_file(path, root='static/')", "def create_project_dir():\r\n with settings(warn_only=True):\r\n run('mkdir -p %s/packages' % (env.path,))\r\n run('mkdir %s/log' % (env.path,))\r\n run('mkdir -p %s/media/uploads' % (env.path,))\r\n run('mkdir -p %s/collected_static' % (env.path,))\r\n # change permissions for writable folder\r\n cmd = env.host_settings.get('make_folder_world_writeable','chown -R www-data:www-data')\r\n if cmd:\r\n run('%s %s/media' % (cmd, env.path))\r\n run('%s %s/collected_static' % (cmd, env.path))", "def get_swagger_static_root():\n return os.path.join(CURDIR, \"static\")", "def create_path_and_index(subdir: str) -> None:\n if not os.path.exists(WEBOUT_PATH + subdir):\n os.makedirs(WEBOUT_PATH + subdir)\n create_blank_index(WEBOUT_PATH + subdir + \"index.html\")", "def copy_static(self, outdir):\n pass", "def update_static_files(self):\n\n params = self.chose_param_value(\"--static\")\n self._check_path_availability([\"get_static_dir\", \"get_static_dir_to\"])\n if self._check_whether_has_params(params):\n self.updater.update_files(\n self.analizer.get_static_dir(),\n self.analizer.get_static_dir_to(),\n params\n )\n return self.write_debug_message(\"Static files upgrade is done!\\n\")\n return self.write_error_message(\"You haven't passed any params about static files\")", "def cp_static_files(self,inpath,outpath): \n if inpath==self.static_dir:\n dest=os.path.join(outpath,os.path.basename(inpath))\n if os.path.exists(dest):\n logger.warning('Remove old static folder')\n shutil.rmtree(dest) #not efficient. Should do it incrementaly...\n logger.info('cp_static_files %s -> %s' %(inpath,dest))\n copyfiles(inpath,dest) \n else:\n for folder in os.listdir(inpath):\n if folder == 'static':\n logger.info('found static folder, copy all...')\n dest=os.path.join(outpath,folder)\n src=os.path.join(inpath,folder)\n if os.path.exists(dest):\n logger.warning('Remove old static folder')\n shutil.rmtree(dest) #not efficient. 
Should do it incrementaly...\n logger.info('cp_static_files %s -> %s' %(src,dest))\n copyfiles(src,dest)\n return 0", "def generate_sample_static_angular(env_root):\n repo_dir = os.path.join(env_root, 'static-angular')\n\n if os.path.isdir(repo_dir):\n LOGGER.error(\"Error generating sample repo -- directory %s \"\n \"already exists!\",\n repo_dir)\n sys.exit(1)\n\n shutil.copytree(\n os.path.join(ROOT, 'templates', 'static-angular'),\n repo_dir\n )\n convert_gitignore(os.path.join(repo_dir, 'sample-app'))\n\n LOGGER.info(\"Sample static Angular site repo created at %s\",\n repo_dir)\n LOGGER.info('(see its README for setup and deployment instructions)')", "def build():\n local('python manage.py build \\\n --skip-static --settings={{ project_name }}.settings.production')\n\n # hack to move whole directory over to build\n local('cd {} && mv static/* build/'.format(settings.BASE_DIR))", "def update_config(self, config):\n toolkit.add_template_directory(config, 'templates')\n toolkit.add_public_directory(config, 'public')\n toolkit.add_resource('fanstatic', 'syngenta')", "def _prep_client_dist_for_project(project_env, project_root_dir):\n #need to make the project's index.html the index.html that tomcat will find\n clientdir = os.path.join(project_root_dir, 'client')\n index_target_fn = os.path.join(clientdir, 'index.html')\n\n index_src_base = 'index.' + project_env.get_project_name() + '.html'\n index_src_fn = os.path.join(clientdir, index_src_base)\n cmd = 'cp ' + index_src_fn + ' ' + index_target_fn\n cr = container_users.make_host_user_command_runner()\n result = cr.run(cmd)\n return result.get_exit_code()", "def server_static (filename):\n return static_file(filename, root=\"./static\")", "def collect_static():\n\n check_promt = (\n not env.prompt or\n console.confirm(\n \"Collect static files and copy them to collect_static?\",\n default=True,\n )\n )\n\n if check_promt:\n with cd(\"%s\" % env.work_path):\n with prefix(\"source %s/bin/activate\" % env.env_path):\n run(\n \"./manage.py collectstatic\"\n \" --noinput\"\n )", "def index():\n return app.send_static_file('index.html')", "def index():\n return app.send_static_file(\"index.html\")", "def collectstatic():\n sudo(env.activate)\n sudo('cd %s' % env.whole_path_symlinked + '/aurora; python manage.py collectstatic;')", "def static_files(filename):\n static_path = os.path.join(frontend.root_path, 'templates', current_app.config['FRONTEND_THEME'], 'static')\n return send_from_directory(static_path, filename)", "def delete_previous_files(schema_name, path_template, path_static):\n list_file_static = listdir(path_static)\n list_file_template = listdir(path_template)\n if schema_name in list_file_static:\n tree_path = path.join(path_static, schema_name)\n rmtree(tree_path, ignore_errors=True)\n html_file_name = \"wrap_\" + schema_name + \".html\"\n if html_file_name in list_file_template:\n html_file_path = path.join(path_template, html_file_name)\n remove(html_file_path)", "def touch_project():\n remote('touch config/wsgi*')", "def main(methods=[\"GET\"]):\n validate_auth()\n ## issue with path resolution after build\n return send_from_directory(\n #todo: remove templates directory reference; index.html isn't a jinja template\n safe_join(current_app.static_folder, 'templates'),\n 'index.html',\n cache_timeout=-1\n )", "def mount_static_directory(self, prefix, directory, remote=False,\n index_page=None):\n prefix = tuple(prefix.strip('/').split('/'))\n if remote or re.match(r'https?://', directory):\n directory = 
RemoteDirectory(directory)\n else:\n directory = abs_path(directory)\n directory = LocalDirectory(directory, index_page=index_page)\n self.register('static_directory', directory, prefix)", "def collectstatic(where=None):\n config = get_config(where)\n with settings(host_string=config['host_string']), cd(config['installation_dir']):\n run('bin/django collectstatic --noinput')", "def root():\n return send_from_directory('../web/dist', 'index.html')", "def generate_sample_static_react(env_root):\n repo_dir = os.path.join(env_root, 'static-react')\n\n if os.path.isdir(repo_dir):\n LOGGER.error(\"Error generating sample repo -- directory %s \"\n \"already exists!\",\n repo_dir)\n sys.exit(1)\n\n shutil.copytree(\n os.path.join(ROOT, 'templates', 'static-react'),\n repo_dir\n )\n convert_gitignore(os.path.join(repo_dir, 'sample-app'))\n\n LOGGER.info(\"Sample static React site repo created at %s\",\n repo_dir)\n LOGGER.info('(see its README for setup and deployment instructions)')", "def update_config(self, config):\n p.toolkit.add_template_directory(config, \"templates\")\n p.toolkit.add_public_directory(config, 'public')", "def index(self):\n\n # 'html/index.html' is the path WITHIN the tarball.\n return self.static('html/index.html')", "def generate_static_site(self, output_root=None, extra_context=None):\n self.app.config['BUILD_PATH'] = output_root\n\n # use this hook for registering URLs to freeze\n self.call_hook(\"generate\", self, output_root, extra_context)\n\n if output_root is not None:\n # realpath or this gets generated relative to the tarbell package\n self.app.config['FREEZER_DESTINATION'] = os.path.realpath(output_root)\n\n self.freezer.freeze()", "def server_static(filepath):\n root_folder = os.path.abspath(os.path.dirname(__file__))\n return bottle.static_file(filepath, root=os.path.join(root_folder, 'static'))", "def update_config(self, config):\n p.toolkit.add_template_directory(config, 'templates')\n p.toolkit.add_public_directory(config, 'public')", "def server_static(self, filepath):\n root = os.path.join(self.webbase, 'assets')\n return static_file(filepath, root=root)", "def static(request):\n return {\n 'STATIC_URL': getattr(settings, 'STATIC_URL', settings.MEDIA_URL)\n }", "def upload():\n env.user = 'webcontent'\n rsync_project(DOCDIR, 'doc/_build/html/', delete=True)", "def send_from_static(filename, **kwargs):\n return send_from_directory(app.static_folder, filename, **kwargs)", "def static(website, request, **etc):\n return website.static.respond(request)", "def test_fingerprinting_off(self):\n static = '/static/css/style.css'\n self.assertEqual(self.site.get_url_for_static(static), static)\n self.assertFileExists(os.path.join(self.site.build_path, self.site.get_url_for_static(static)[1:]))", "def collectstatic():\n puts(yellow(\"Collect statics\"))\n django_manage('collectstatic', '-l', '--noinput')", "def source_paths(self):\n paths = self.config.get('static_dirs')\n if paths:\n return paths\n return [self.config.get('static_dir')]", "def remove_fake_project_dir(request):\n def fin_remove_fake_project_dir():\n if os.path.isdir('fake-project'):\n utils.rmtree('fake-project')\n request.addfinalizer(fin_remove_fake_project_dir)", "def staticFile(path):\n logger.info('[FLASKWEB /fs] Static File Request for `%s`' % path)\n local = os.path.join(webapp.config['DIR'], path)\n if not os.path.exists(local):\n return returnError(\"File not found: %s\" % path, 404)\n if os.path.isdir(local):\n contents = sorted(os.listdir(local))\n for i, f in enumerate(contents):\n if 
os.path.isdir(f):\n contents[i] += '/'\n\n if request.headers['Accept'] == 'application/json':\n return jsonify(dict(cwd=local, contents=contents)), 200\n else:\n return render_template('listing.html', cwd=path, listing=contents), 200\n\n else:\n if 'stdout' in local or 'output' in local or local.split('.')[-1] in ['txt', 'yaml', 'yml', 'json', 'log']:\n with open(local, 'r') as file:\n # output = unicode(file.read(), 'utf-8')\n output = file.read()\n\n if request.headers['Accept'] == 'application/json':\n return output, 200\n else:\n return render_template(\"output.html\", output=output)\n\n return send_from_directory(webapp.config['DIR'], path)", "def toplevel_static(folder, filename):\n filename = safe_join(folder, filename)\n cache_timeout = app.get_send_file_max_age(filename)\n return send_from_directory(app.static_folder, filename,\n cache_timeout=cache_timeout)", "def dispatch(self, request, *args, **kwargs):\n try:\n self.copy_static_resources()\n except MissingStaticRoot:\n self.template_name = 'general_error.html'\n kwargs['error'] = _('There is no STATIC_ROOT defined in the settings file')\n return super().dispatch(request, *args, **kwargs)\n except Exception as e:\n self.template_name = 'general_error.html'\n kwargs['error'] = str(e)\n return super().dispatch(request, *args, **kwargs)\n cms_pages = Page.objects.filter(publication_date__isnull=False)\n for page in cms_pages:\n languages = page.get_languages()\n for language in languages:\n url = page.get_public_url(language)\n if url not in self.done:\n self.done.append(url)\n static_page_path = '{}{}index.html'.format(STORAGE_PATH, url)\n fetch_url = \"{}{}\".format(self.SOURCE_DOMAIN, url)\n response = requests.get(fetch_url)\n make_dir(url)\n with open(static_page_path, 'w') as file:\n file.write(response.text)\n return super().dispatch(request, *args, **kwargs)", "def build_finished(app, exception):\n if app.config.offline_skin_js_path is not None:\n copy_static_entry(path.join(app.builder.srcdir, app.config.offline_skin_js_path), path.join(app.builder.outdir, '_static'), app.builder)\n if app.config.offline_wavedrom_js_path is not None:\n copy_static_entry(path.join(app.builder.srcdir, app.config.offline_wavedrom_js_path), path.join(app.builder.outdir, '_static'), app.builder)", "def prepare(skip_static=False):\n\n local('npm install')\n local('grunt build')\n\n with warn_only():\n local('git add staticfiles')\n local('git add {{ project_name }}/templates')\n local('git commit -m \"PRODUCTION ONLY: Build static files.\"')\n\n files_to_remove = (\n '.bowerrc',\n '.editorcinfig',\n '.gitignore',\n '.jshintrc',\n 'bower.json',\n 'dev-only-package.json',\n 'error.log',\n 'fabfile.py',\n 'Gruntfile.js',\n 'migrate.sh',\n 'README.md',\n 'serve.sh',\n 'flush_cache.py',\n )\n\n with warn_only():\n for file_ in files_to_remove:\n local('git rm {}'.format(file_))\n\n # store it\n local('git commit -m \"PRODUCTION ONLY: Removing files.\"')\n\n if skip_static:\n local('touch .skipDjango')\n local('git add .skipDjango')\n local('git commit -m \"PRODUCTION ONLY: Skip static files\"')", "def files_serve(path):\n return flask.send_from_directory(\"static/js\", path)", "def create_assets():\n assets = {}\n\n # Load all static files\n for root, dirs, files in os.walk(STATIC_DIR):\n for fname in files:\n filename = os.path.join(root, fname)\n with open(filename, \"rb\") as f:\n assets[os.path.relpath(filename, STATIC_DIR)] = f.read()\n\n # Collect pages\n pages = {}\n for fname in os.listdir(PAGES_DIR):\n if fname.lower().endswith(\".md\"):\n 
name = fname.split(\".\")[0].lower()\n with open(os.path.join(PAGES_DIR, fname), \"rb\") as f:\n md = f.read().decode()\n pages[name] = Page(name, md)\n\n # todo: Collect blog posts\n\n # Get template\n with open(os.path.join(THIS_DIR, \"template.html\"), \"rb\") as f:\n html_template = f.read().decode()\n\n with open(os.path.join(THIS_DIR, \"style.css\"), \"rb\") as f:\n css = f.read().decode()\n css += \"/* Pygments CSS */\\n\" + HtmlFormatter(style=\"vs\").get_style_defs(\n \".highlight\"\n )\n\n # Generate pages\n year = datetime.now().year\n for page in pages.values():\n page.prepare(pages.keys())\n title = TITLE if page.name == \"index\" else TITLE + \" - \" + page.name\n menu = create_menu(page)\n html = html_template.format(\n title=title, style=css, body=page.to_html(), menu=menu, year=year\n )\n print(\"generating\", page.name + \".html\")\n assets[page.name + \".html\"] = html.encode()\n\n # Fix backslashes on Windows\n for key in list(assets.keys()):\n if \"\\\\\" in key:\n assets[key.replace(\"\\\\\", \"/\")] = assets.pop(key)\n\n return assets", "def copy_static(root_directory, dist_directory, sdk_directory):\n\n for static in configuration.STATICS:\n context = {\n \"root\": root_directory,\n \"sdk\": sdk_directory,\n \"dist\": dist_directory\n }\n\n source = templates.from_string(static[\"source\"], context)\n target = templates.from_string(static[\"target\"], context)\n target = os.path.join(dist_directory, target)\n\n # Perform the action.\n sys.stdout.write(\"Copying '%s'\\n\" % source)\n\n if static[\"type\"] == \"directory\":\n recursive_overwrite(source, target)\n else:\n shutil.copy(source, target)", "def test_static_routes(self, request_client):\n rv = request_client.get(\"/static/images/Group.jpg\")\n assert \"200\" in str(rv.status)\n\n rv = request_client.get(\"/\")\n assert \"200\" in str(rv.status)", "def deleteIndexFileIfExists(self):\n try:\n os.remove(self.dir+'/index.html')\n except OSError:\n pass", "def assets(self):\n static = self.static\n if static is None:\n return None\n\n assets = os.path.join(static, 'assets')\n if not os.path.isdir(assets):\n return None\n\n return assets", "def server_static(filepath):\n return bottle.static_file(filepath, root=STATIC_ROOT)", "def handle_noargs(self, **options):\r\n for staticfiles_dir in getattr(settings, \"STATICFILES_DIRS\", []):\r\n # Cribbed from the django-staticfiles app at:\r\n # https://github.com/jezdez/django-staticfiles/blob/develop/staticfiles/finders.py#L52\r\n if isinstance(staticfiles_dir, (list, tuple)):\r\n prefix, staticfiles_dir = staticfiles_dir\r\n\r\n # Walk over the current static files directory tree,\r\n # preprocessing files that have a template extension.\r\n for root, dirs, files in os.walk(staticfiles_dir):\r\n for filename in files:\r\n outfile, extension = os.path.splitext(filename)\r\n # We currently only handle Mako templates\r\n if extension == \".mako\":\r\n self.__preprocess(os.path.join(root, filename),\r\n os.path.join(root, outfile))", "def index(self):\n return open(os.path.join(self.staticdir, \"index.html\"))", "def upload():\n run('mkdir -p /srv/images/'+env.project_name+'/')\n rsync_project(\n env.project_dir, './',\n exclude=(\n '.git', '.gitignore', '__pycache__', '*.pyc', '.DS_Store', 'environment.yml',\n 'fabfile.py', 'Makefile', '.idea', 'bower_components', 'node_modules',\n '.env.example', 'README.md', 'var'\n ), delete=True)", "def addMobileStaticResourceDir(self, dir: str) -> None:\n self.__rootMobileResource.addFileSystemRoot(dir)", "def update_project():\n 
_require_environment()\n\n # Grants write rights on log dir for the admin group\n log_dir = '%s/log' % _interpolate(VIRTUALENV_DIR)\n if files.exists(log_dir):\n sudo('chmod -R g+w %s' % log_dir)\n\n # Updates from git, issues Django syncdb, South migrate, Collecstatic and resets Apache\n branch = env.project.get('git_branch', 'master')\n with prefix(_django_prefix()):\n with cd(_django_project_dir()):\n with settings(hide('warnings'), warn_only=True):\n run('git fetch origin %s:%s' % (branch, branch))\n run('git checkout %s' % branch)\n with settings(hide('warnings'), warn_only=True):\n run('git pull origin %s' % branch)\n run('django-admin.py syncdb --noinput')\n run('django-admin.py migrate')\n run('touch config/wsgi*')\n run('django-admin.py collectstatic --noinput')", "def test_serve_built_files(self):\r\n self.mkbundle('file1', 'file2', output=\"out\").build()\r\n # I tried using the test client for this, but it would\r\n # need to be setup using StaticFilesHandler, which is\r\n # incompatible with the test client.\r\n from django_assets.finders import AssetsFinder\r\n assert AssetsFinder().find('out') == self.path(\"media/out\")", "def statics(file, type='img'):\n return bottle.static_file(file, root=HOME+STATIC_PATH+'/'+type)", "def test_site_created(self):\n self.assertEqual(self.app.site_name, 'MDWeb')\n # pylint: disable=W0212\n self.assertEqual(self.app._static_folder, '/my/theme/assets')\n self.assertIsNotNone(self.app.navigation)\n self.assertGreater(len(self.app.pages), 0)", "def collectstatic():\n local(\"docker-compose exec web python3 manage.py {}\".format(\n 'collectstatic --noinput'))", "def update_config(self, config):\n toolkit.add_template_directory(config, 'templates')\n toolkit.add_resource('fanstatic', 'mingus')\n return", "def add_url_rules(self):\n self.app.add_url_rule(\"/\", \"root\",\n lambda: self.file_renderer(\"index.html\"),\n methods=[\"GET\", \"POST\", \"PUT\", \"DELETE\"])\n self.app.add_url_rule(\"/<path:path>\", \"all_files\", lambda path:\n self.file_renderer(path), methods=[\"GET\", \"POST\",\n \"PUT\",\n \"DELETE\"])", "def wsgi_application(static_path=None, debug=False, log_level=logging.WARNING,\n template_paths=None, cookie_name=None, static_map=None):\n global template_loader, session_store\n session_store = FilesystemSessionStore()\n template_loader = TemplateLoader(template_paths or ['templates'],\n auto_reload=debug)\n\n if isinstance(static_map, basestring):\n static_map = load_static_map(static_map)\n application = Application(cookie_name=cookie_name, debug=debug,\n static_root='/static', static_map=static_map)\n if debug:\n application = DebuggedApplication(application, evalex=True)\n log_level = logging.DEBUG\n logging.basicConfig(level=log_level)\n logging.getLogger('werkzeug').setLevel(log_level)\n logging.getLogger().setLevel(log_level)\n if not static_path:\n static_path = os.path.join(os.getcwd(), 'static')\n application = SharedDataMiddleware(application, {'/static': static_path})\n return application", "def _find_static_directory(self, path):\n if self.has_any('static_directory'):\n prefix = ()\n segments = tuple(path.lstrip('/').split('/'))\n for segment in segments:\n prefix += (segment,)\n if self.contains('static_directory', prefix):\n return prefix, segments[len(prefix):]\n return None, None", "def deploy():\n def mkdirp(dir):\n if not os.path.exists(dir):\n os.makedirs(dir)\n\n def copytree(f1, f2):\n if osp.exists(f2):\n shutil.rmtree(f2)\n shutil.copytree(f1, f2)\n\n def symlink(p1, p2):\n if osp.exists(p2):\n try:\n 
os.remove(p2)\n except:\n shutil.rmtree(p2)\n os.symlink(osp.abspath(p1), p2)\n\n def pathof(modpath):\n mod = __import__(modpath)\n path = os.path.dirname(mod.__file__)\n return path\n\n app = flask.Flask(__name__, static_url_path='/static')\n ping_viz_path = osp.join(pathof('ping'), 'viz')\n roygbiv_web_path = osp.join(pathof('roygbiv'), 'web')\n\n try:\n\n # Poster\n mkdirp('deploy')\n copytree('2015', 'deploy/2015')\n\n # Brain\n copytree(roygbiv_web_path, 'deploy/brain')\n for fil in glob.glob('brain/*.html'):\n shutil.copy(fil, 'deploy/' + fil)\n shutil.copy('brain/two_hemis.html', 'deploy/brain/index.html')\n for fil in glob.glob('brain/css/*') + glob.glob('brain/js/*'):\n shutil.copy(fil, 'deploy/' + fil)\n mkdirp('deploy/brain/data')\n copytree('generated/data/fsaverage', 'deploy/brain/data/fsaverage') # data\n\n # Manhattan\n mkdirp('deploy/gwas')\n copytree(osp.join(ping_viz_path, 'manhattan'), 'deploy/gwas')\n shutil.copyfile('deploy/gwas/manhattan.html', 'deploy/gwas/index.html')\n mkdirp('deploy/gwas/data')\n for fil in glob.glob('generated/data/*.json'):\n shutil.copyfile(fil, os.path.join('deploy/gwas/data', os.path.basename(fil)))\n\n # scatter / similarity plots\n copytree('generated/plots', 'deploy/plots')\n\n # Create the default page.\n with open('deploy/index.html', 'w') as fp:\n fp.write(serve_index())\n\n # Finally, try and reduce snp file size.\n with open('deploy/gwas/data/SNPS_all.json', 'r') as fp:\n snps = simplejson.load(fp)\n with open('deploy/gwas/data/GWAS_MRI_cort_area_ctx_frontalpole_AI__Age_At_IMGExam.json', 'r') as fp:\n gwas = simplejson.load(fp)\n snps = dict([(k, v) for k, v in snps.items()\n if k in gwas[gwas.keys()[0]]])\n with open('deploy/gwas/data/snps_all.json', 'w') as fp:\n simplejson.dump(snps, fp)\n\n except Exception as e:\n print(\"Error deploying: %s\" % e)\n\n def serve():\n app.route('/')(serve_index)\n\n @app.route('/<path:path>')\n def serve_brain_data(path):\n return flask.send_from_directory('deploy', path)\n app.run()\n serve()", "def static(self, game_id, filename):\n #Get the game\n game = games.GAME_DICT[game_id]\n\n #Find the path to load\n path = os.path.join(game.static_folder, filename)\n\n #Check we are not going outside the static_folder\n prefix = os.path.commonprefix([path, game.static_folder])\n if not prefix.startswith(game.static_folder):\n raise Exception('Cannot get static files outside the static_folder!')\n\n return cherrypy.lib.static.serve_file(path)", "def nullx(self):\n return open(os.path.join(self.staticdir, \"index.html\"))", "def setup_theme(app):\r\n theme = app.config['THEME']\r\n app.template_folder = os.path.join('themes', theme, 'templates')\r\n app.static_folder = os.path.join('themes', theme, 'static')", "def test_static_url_map_static_asset_path(self):\r\n self.make_course(pdf_textbooks=[PORTABLE_PDF_BOOK], static_asset_path='awesomesauce')\r\n url = self.make_url('pdf_book', book_index=0, chapter=1)\r\n response = self.client.get(url)\r\n self.assertNotContains(response, 'file={}'.format(PORTABLE_PDF_BOOK['chapters'][0]['url']))\r\n self.assertNotContains(response, 'file=/c4x/{0.org}/{0.course}/asset/{1}'.format(\r\n self.course.location,\r\n PORTABLE_PDF_BOOK['chapters'][0]['url'].replace('/static/', '')))\r\n self.assertContains(response, 'file=/static/awesomesauce/{}'.format(\r\n PORTABLE_PDF_BOOK['chapters'][0]['url'].replace('/static/', '')))", "def make_static_assets(opts):\n\n css_filename = do_css(opts['css_source_dir'], opts['out_dir'])\n js_filename = do_js(opts['js_source_dir'], 
opts['out_dir'])\n return {\n 'primary_css': css_filename,\n 'js': js_filename\n }", "def static(resource_root, filepath):\n if resource_root not in ('resources', 'js'):\n abort(404)\n\n return static_file(filepath, root=resource_root)", "def lesson_static_generator_dir(lesson_slug, static_dir, search_dir):\n if not search_dir.exists():\n return\n\n for static_file in search_dir.iterdir():\n\n if static_file.is_dir():\n yield from lesson_static_generator_dir(lesson_slug, static_dir, static_file)\n continue\n\n relative = static_file.relative_to(static_dir)\n\n yield (\"lesson_static\", {\"lesson\": lesson_slug, \"path\": str(relative)})", "def deploy():\n setup()\n builddir = get_build_dir()\n if sys.platform == 'win32':\n # Support cygwin rsync on windows:\n build_path = cygpath(slashed(builddir))\n else:\n build_path = slashed(builddir)\n rsync_project(env.admin_webroot, build_path, exclude=\".*\", delete=True)\n sudo(\"chmod -R 755 %(admin_webroot)s\" % env)", "def static(filename):\n\ttimestamp = os.path.getmtime(os.path.join(app.static_folder, filename))\n\treturn \"%s/%s?%s\" % (app.static_url_path, filename, timestamp)", "def static(filename):\n\ttimestamp = os.path.getmtime(os.path.join(app.static_folder, filename))\n\treturn \"%s/%s?%s\" % (app.static_url_path, filename, timestamp)", "def djangofy(project):\n click.echo(click.style('Making Vue.js {project} into django app'.format(project=project), bg='blue', fg='white'))\n urls_py = URLS_TEMPLATE.format(project=project)\n try:\n os.makedirs('{project}/templates/{project}/'.format(project=project))\n except OSError:\n click.echo(click.style('Command already executed', fg='red'))\n sys.exit(0)\n with cd(project):\n touch('__init__.py')\n touch('index.html', 'templates/{project}/'.format(project=project))\n with open('urls.py', 'w') as f:\n f.write(urls_py)\n with open('package.json', 'r+') as f:\n pakckage_json = json.loads(''.join(f.readlines()), object_pairs_hook=OrderedDict)\n pakckage_json['scripts']['build'] += ' && pyvue djbuild {project}'.format(project=project)\n f.seek(0)\n f.write(json.dumps(pakckage_json, indent=2))\n with cd('config'):\n with open('index.js', 'r+') as f:\n lines = f.readlines()\n f.seek(0)\n for line in lines:\n f.write(line\n .replace('../dist/index.html', '../templates/{project}/index.html'.format(project=project))\n .replace('../dist', '../static')\n .replace(\"assetsSubDirectory: 'static'\",\n \"assetsSubDirectory: '{project}'\".format(project=project)))\n\n click.echo(click.style('Enjoy!', fg='green'))", "def test_serve_static_dj17_without_staticfiles_app(self, live_server, settings) -> None:\n with pytest.raises(HTTPError):\n urlopen(live_server + \"/static/a_file.txt\").read()", "def static(prefix, view=serve, **kwargs):\n if not prefix:\n raise ImproperlyConfigured(\"Empty static prefix not permitted\")\n elif not settings.DEBUG or urlsplit(prefix).netloc:\n # No-op if not in debug mode or a non-local prefix.\n return []\n return [\n re_path(\n r\"^%s(?P<path>.*)$\" % re.escape(prefix.lstrip(\"/\")), view, kwargs=kwargs\n ),\n ]" ]
[ "0.70106965", "0.68311924", "0.6625952", "0.63648784", "0.61918205", "0.60434896", "0.5981367", "0.58381754", "0.5810352", "0.57726264", "0.57547545", "0.57248443", "0.56464094", "0.5618542", "0.56139016", "0.5593905", "0.55286866", "0.55245644", "0.5519247", "0.55121106", "0.55010533", "0.55010533", "0.55010533", "0.55010533", "0.5496965", "0.5491143", "0.54505855", "0.54126155", "0.5390763", "0.53661925", "0.53403133", "0.5338361", "0.5320172", "0.5297789", "0.5280755", "0.52375996", "0.5234887", "0.5234879", "0.5221974", "0.52065384", "0.5205289", "0.5204435", "0.5203804", "0.5191193", "0.51806986", "0.5178613", "0.5166444", "0.51355803", "0.5129207", "0.51038325", "0.5103053", "0.5070712", "0.5052921", "0.50527054", "0.50385743", "0.50309384", "0.5027654", "0.5021094", "0.50206316", "0.501991", "0.50135744", "0.50124586", "0.50016636", "0.49991456", "0.49940884", "0.4988206", "0.49741164", "0.49500418", "0.49318802", "0.4926898", "0.4921649", "0.49133265", "0.4912484", "0.49042606", "0.49003604", "0.4893946", "0.48921993", "0.48921242", "0.48917654", "0.4871108", "0.48701307", "0.48567438", "0.48310208", "0.48151618", "0.48104", "0.4807115", "0.4795657", "0.47813994", "0.47762415", "0.4757539", "0.47531497", "0.47512305", "0.4751199", "0.47509426", "0.47419414", "0.47409043", "0.47409043", "0.47364947", "0.473441", "0.47255546" ]
0.7912355
0
Installs the necessary third-party apps into the local webapp (not globally) using pip. There is probably a better way to do this using a requirements file. Also appends a helpful comment to .bash_profile.
def pip_installs(): pip = r'pip-2.7 install --install-option="--install-scripts=$PWD/bin" --install-option="--install-lib=$PWD/lib/python2.7" ' with settings(warn_only=True): run("mkdir $HOME/tmp") with cd(remote_dir): for installation in install_list: run("export TEMP=$HOME/tmp && %s %s" % (pip, installation)) run("echo '#%s' >> $HOME/.bash_profile" % python_add_str)
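The query above concedes that a requirements file would be the cleaner route. For illustration, a minimal sketch of that alternative in the same Fabric 1.x style — `remote_dir`, `python_add_str`, and an already-uploaded `requirements.txt` are assumptions carried over from the snippet above:

```python
# Sketch only: a requirements-file variant of pip_installs() above.
# Assumes remote_dir and python_add_str are the same module globals the
# original task uses, and that requirements.txt (one line per entry of
# install_list) has already been pushed to remote_dir.
from fabric.api import cd, run, settings

def pip_installs_from_requirements():
    pip = (r'pip-2.7 install --install-option="--install-scripts=$PWD/bin" '
           r'--install-option="--install-lib=$PWD/lib/python2.7" ')
    with settings(warn_only=True):
        run("mkdir $HOME/tmp")
    with cd(remote_dir):
        # One invocation resolves every pinned entry instead of looping.
        run("export TEMP=$HOME/tmp && %s -r requirements.txt" % pip)
    run("echo '#%s' >> $HOME/.bash_profile" % python_add_str)
```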
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup(ctx):\r\n ctx.run('pip3 install -r requirements.txt')", "def install_requirements():\n _git_pull()\n _install_requirements()\n _syncdb()\n _migrate()\n _restart_webserver()", "def install_requirements():\n local('. fabric_factory/ve/bin/activate; easy_install pip')\n local('. fabric_factory/ve/bin/activate; pip install -r requirements.txt')", "def install_requirements():\n run_commands('pip install -r ./requirements/dev.txt')", "def install_backend_deps():\n with lcd(BACKENDDIR):\n cmd = '%(pip)s install -r %(requirements_file)s' % {\n 'pip': get_pip(),\n 'requirements_file': requirements_file\n }\n local(cmd)\n # Install Pandoc\n local(\"sudo apt-get install pandoc\")\n # Install Pyandoc\n with lcd(HOMEDIR):\n if not os.path.isdir(os.path.join(HOMEDIR, 'pyandoc')):\n local(\"git clone [email protected]:kennethreitz/pyandoc.git\")\n with lcd(\"pyandoc\"):\n if not env.local:\n\t with prefix('. /home/ubuntu/virtualenvs/venv-system/bin/activate'):\n local(\"python setup.py install\")\n else:\n local(\"python setup.py install\")", "def install_requirements():\r\n if env.hosts:\r\n run ('cd %(path)s %(command_join)s env/bin/pip install -r current-release/requirements.txt' % env)\r\n else:\r\n local('%spip install -r requirements.txt' % virtualenv_bin, capture=False)", "def install(env, requirements, args, quiet=False):\n if os.path.isfile(requirements):\n args += ('-r', requirements)\n label = 'project'\n else:\n args += ('-U', '-e', '.')\n label = 'library'\n\n if not quiet:\n print('== Step 2. Install {0} =='.format(label))\n\n pip_cmd(env, ('install', ) + args, echo=not quiet)\n\n if not quiet:\n print()\n\n return True", "def install_requirements():\n run('source %(env_path)s/bin/activate; pip install -U -r %(repo_path)s/requirements.txt' % env)", "def pip_install():\n _require_environment()\n remote(PIP_INSTALL_PREFIX)", "def install_requirements():\n req_path = os.path.join(vlogger_dir, \"requirements.txt\")\n subprocess.call([\"pip\", \"install\", \"-r\", req_path])", "def install(self):\n\n self.clean_git_checkout(self.git_repo, '/src')\n\n self.__copy_config_templates();\n\n self.local(\"sudo pip install -r src/requirements.txt --upgrade\")\n\n if not self.is_local():\n PiService.install(self) #copy to remote\n\n self.sudo(\"pip install -r src/requirements.txt --upgrade\")", "def sub_install_python_requirements_aws():\n # Activate the virtualenv\n activate = 'source {0}/{1}/bin/activate'.format(\n env.virtualenv['dir'], env.virtualenv['name'])\n run(activate)\n\n # make sure the directory is there\n run('mkdir -p /home/ubuntu')\n\n # put the local directory '/Users/jenniferchen/github/HS698-project'\n # - it contains files or subdirectories\n # to the ubuntu server\n put('/Users/jenniferchen/github/HS698-project',\n '/home/ubuntu')\n\n # Install Python requirements\n install = 'pip install -r ' \\\n '/home/ubuntu/HS698-project/Flask_app/requirements.txt'\n\n # Join and execute the commands\n sudo(install)\n # Run the file app.py to start the Flask app\n dev_server = 'python HS698-project/Flask_app/app.py'\n run(dev_server)", "def install_requirements():\n with cd(env.code_dir):\n with _virtualenv():\n sudo('pip install -r requirements.txt', pty=True)", "def install_packages():\n with open(\"requirements.txt\", \"w\") as requirements_file:\n subprocess.run([\"pipenv\", \"lock\", \"-r\"], stdout=requirements_file)\n\n subprocess.run(\n [\"pip\", \"install\", \"-r\", \"requirements.txt\", \"--no-deps\", \"-t\", BUILD_DIR]\n )", "def sub_install_python_requirements():\n # 
Activate the virtualenv\n activate = 'source {0}/{1}/bin/activate'.format(\n env.virtualenv['dir'], env.virtualenv['name'])\n run(activate)\n\n # Install Python requirements\n install = 'pip install -r /vagrant/Flask_app/requirements.txt'\n\n # Join and execute the commands\n run(activate + '; ' + install)", "def pip_requirements():\n\n require(\n \"virtualenv_path\",\n \"requirements_path\",\n \"http_proxy\",\n \"https_proxy\",\n \"sudo_user\",\n )\n cmd = \"pip install --quiet --requirement %s\" % env.requirements_path\n\n # append packages url if specified\n if env.get(\"packages_url\") is not None:\n cmd += \" -f %s\" % env.get(\"packages_url\")\n\n with context_managers.proxy(env.http_proxy, env.https_proxy):\n with context_managers.virtualenv(env.virtualenv_path):\n sudo(cmd, user=env.sudo_user)", "def install_requirements(self, requirements, extra_pip_args=None):\n\n pip_exe = os.path.join(self.scripts_dir, \"pip\")\n cmd = [pip_exe, \"install\", \"-r\", requirements] + (extra_pip_args or [])\n out = subprocess.Popen(cmd).communicate()", "def install():\n\n if (Path.cwd() / \"src\" / \"environment.yml\").is_file():\n call([\"conda\", \"install\", \"--file\", \"src/environment.yml\", \"--yes\"])\n\n pip_command = [\"install\", \"-U\", \"-r\", \"src/requirements.txt\"]\n\n if os.name == \"posix\":\n python_call(\"pip\", pip_command)\n else:\n command = [sys.executable, \"-m\", \"pip\"] + pip_command\n subprocess.Popen(command, creationflags=subprocess.CREATE_NEW_CONSOLE)", "def install_frontend_deps():\n\n with lcd(FRONTENDDIR):\n cmd = '%(npm)s install' % {'npm': get_npm()}\n local(cmd)\n cmd = '%(bower)s install' % {'bower': get_bower()}\n local(cmd)", "def install():\n verun('pip install -r {0}'.format(requirements))", "def bootstrap():\n _require_environment()\n\n adduser()\n install_python()\n install_git()\n install_apache()\n install_mysql()\n setup_project()", "def install_requirements():\n require(\"release\", provided_by=[deploy])\n with cd(\"%(path)s\" % env):\n sudo(\"./bin/pip install -r ./releases/%(release)s/requirements.txt\" % env)", "def install_django_project(self):\n\n from django.conf import settings as django_settings\n\n with cd(\"{0}\".format(self.app_remote_dir)):\n\n pip(\"install -r requirements.txt\")\n\n with cd(\"{0}\".format(self.app_package)):\n self.setup_settings_local()\n\n self.syncdb(django_settings)\n self.setup_gunicorn_supervisor()", "def set_up(dev=False):\n _install_dependencies()", "def update_requirements():\n\n with virtualenv(VIRTUALENV_PATH):\n cmd = ['pip install']\n cmd += ['--requirement %s' % os.path.join(CODE_DIR,'requirements.txt')]\n run(' '.join(cmd))", "def install():\n deploy()\n configure()", "def required():\n pip = path(\"bin/pip\")\n if not pip.exists():\n sh('%s install -E tg2env -r normal-reqs.txt --extra-index-url=http://www.turbogears.org/2.0/downloads/current/index' % pip)\n call_pavement('pavement.py', 'develop')", "def install_packages():\n\n require('environment', provided_by=env.environments)\n packages_file = os.path.join(PROJECT_ROOT, 'requirements', 'packages.txt')\n system.install_packages_from_file(packages_file)", "def install_deps():\n pipenv_dev = run('pipenv install --dev'.split(), check=True)\n print('Installed dependencies and virtual environment. 
Type `pipenv shell` to activate later.')", "def main() -> None:\n verify_pip_is_installed()\n print('Regenerating \"requirements.txt\" file...')\n install_python_dev_dependencies.compile_pip_requirements(\n 'requirements.in', 'requirements.txt')\n # Adds a note to the beginning of the 'requirements.txt' file to make sure\n # developers understand that they should not append or change this\n # autogenerated file.\n with utils.open_file(\n common.COMPILED_REQUIREMENTS_FILE_PATH, 'r+') as f:\n content = f.read()\n f.seek(0, 0)\n f.write(\n '# Developers: Please do not modify this auto-generated file. If\\n'\n '# you want to add, remove, upgrade, or downgrade libraries,\\n'\n '# please change the `requirements.in` file, and then follow\\n'\n '# the instructions there to regenerate this file.\\n' + content)\n\n mismatches = get_mismatches()\n if mismatches:\n _rectify_third_party_directory(mismatches)\n validate_metadata_directories()\n else:\n print(\n 'All third-party Python libraries are already installed correctly.')", "def install(self):\n other_args = list(requirement_args(self._argv, want_other=True))\n archive_path = join(self._temp_path, self._downloaded_filename())\n # -U so it installs whether pip deems the requirement \"satisfied\" or\n # not. This is necessary for GitHub-sourced zips, which change without\n # their version numbers changing.\n run_pip(['install'] + other_args + ['--no-deps', '-U', archive_path])", "def install():\n build()\n sh(\"%s setup.py develop\" % PYTHON)", "def install_deps():\n click.echo(\"install_deps\")", "def install(where='local'):\n config = get_config(where)\n print 'using configuration: %s' % config\n with settings(host_string=config['host_string']):\n if not files.exists(config['installation_dir']):\n run('git clone %(git_repo)s %(installation_dir)s' % config)\n with cd(config['installation_dir']):\n run('git submodule init')\n run('git submodule update --init')\n\n with settings(host_string=config['host_string']), cd(config['installation_dir']):\n run('python2.7 bootstrap.py -c %(cfg)s' % config)\n deploy(where)\n secs = 4\n sleep(secs)\n init_db(where)", "def install_deps():\n default = open('requirements.txt', 'r').readlines()\n new_pkgs = []\n links = []\n for resource in default:\n if 'git+https' in resource:\n pkg = resource.split('#')[-1]\n links.append(resource.strip())\n new_pkgs.append(pkg.replace('egg=', '').rstrip())\n else:\n new_pkgs.append(resource.strip())\n return new_pkgs, links", "def install_deps():\n dist = check_distribution()\n if dist == Distribution.TEXLIVE:\n texlive_install_deps()\n elif dist == Distribution.MIKTEX:\n miktex_install_deps()\n\n install_pygments()", "def bootstrap():\n sub_install_packages()\n sub_install_virtualenv()\n sub_create_virtualenv()\n sub_install_python_requirements()", "def setup():\n\n debs = (\"python-setuptools\", \"apache2\", \"libapache2-mod-wsgi\")\n\n require(\"hosts\", provided_by=[production, staging])\n sudo(\"apt-get install %s\" % \" \".join(debs))\n sudo(\"easy_install virtualenv pip\")\n sudo(\"mkdir -p %(path)s\" % env)\n with cd(\"%(path)s\" % env):\n sudo(\"mkdir -p releases; mkdir -p packages\")\n sudo(\"virtualenv --no-site-packages .\")\n sudo(\"mkdir -p /var/log/twit-demo; chown www-data:www-data /var/log/twit-demo\")", "def deps(ctx):\n header(deps.__doc__)\n with ctx.cd(ROOT):\n ctx.run(\n \"pip install -r requirements/develop.pip -r requirements/doc.pip\", pty=True\n )", "def requires(*requirements, **kwargs):\n if '/.tox/' in sys.executable:\n venv = 
os.path.dirname(os.path.dirname(sys.executable))\n elif env.virtual_env: # pragma: no cover\n venv = env.chut_virtualenv = env.virtual_env\n else: # pragma: no cover\n venv = os.path.expanduser(kwargs.get('venv', '~/.chut/venv'))\n if not env.pip_download_cache: # pragma: no cover\n env.pip_download_cache = os.path.expanduser('~/.chut/cache')\n sh.mkdir('-p', env.pip_download_cache)\n bin_dir = os.path.join(venv, 'bin')\n if bin_dir not in env.path: # pragma: no cover\n env.path = [bin_dir] + env.path\n requirements = list(requirements)\n if 'chut' not in requirements:\n requirements.insert(0, 'chut')\n if not test.d(venv): # pragma: no cover\n import urllib\n url = 'https://raw.github.com/pypa/virtualenv/master/virtualenv.py'\n urllib.urlretrieve(url, '/tmp/_virtualenv.py')\n sh[sys.executable]('-S /tmp/_virtualenv.py', venv) > 1\n sh.rm('/tmp/_virtualenv*', shell=True)\n info('Installing %s...' % ', '.join(requirements))\n sh.pip('install -qM', *requirements) > 1\n elif env.chut_virtualenv:\n upgrade = '--upgrade' in sys.argv\n if (env.chut_upgrade or upgrade): # pragma: no cover\n installed = ''\n else:\n installed = str(sh.pip('freeze')).lower()\n requirements = [r for r in requirements if r.lower() not in installed]\n if requirements: # pragma: no cover\n info('Updating %s...' % ', '.join(requirements))\n sh.pip('install -qM --upgrade', *requirements) > 1\n executable = os.path.join(bin_dir, 'python')\n if not env.chut_virtualenv: # pragma: no cover\n env.chut_virtualenv = venv\n os.execve(executable, [executable] + sys.argv, env)", "def install_dependencies():\n\n # check python version and verify we are using Python 3\n if sys.version[0] < '3':\n print(\"ERROR: python version 3 required. You are using version \"\n \"{}\".format(sys.version))\n print(\"You must install python 3 from https://www.python.org\")\n print(\"Make sure to check the 'pip' package manager option when\")\n print(\"installing python\")\n return\n try:\n import pip\n except ModuleNotFoundError:\n print(\"The python 'pip' package manager is required.\")\n print(\"Go to https://www.python.org and download Python 3\")\n print(\"When re-installing, select 'modify' and make sure\")\n print(\"to check the 'pip' option\")\n return\n\n print(\"Python 3 and pip is installed\")\n\n # upgrade/install dependencies such as robot framework\n subprocess.run([\"python\", \"-m\", \"pip\", \"install\", \"-q\", \"--user\",\n \"--no-warn-script-location\", \"-r\",\n os.path.join(os.path.curdir, \"requirements.txt\")],\n shell=True, check=True)\n print(\"Robot framework is installed and up to date\")\n print(\"PyQT5 is installed and up to date\")", "def _install_dependencies(self):\n\n requirements_file = self.app_directory.joinpath('requirements.txt')\n\n package_copy_required = False\n if requirements_file.exists():\n cmd = [\n sys.executable,\n '-m',\n 'pip',\n 'install',\n '-r',\n str(requirements_file),\n '-t',\n str(self.build_directory),\n ]\n package_copy_required = True\n else:\n cmd = [\n sys.executable,\n '-m',\n 'pip',\n 'install',\n '.',\n '-t',\n str(self.build_directory),\n ]\n\n logger.debug('Running subprocess cmds: %s', cmd)\n\n try:\n _ = subprocess.run(cmd, check=True)\n except Exception:\n logger.error('Pip failed to install the app using cmd=[%s].', cmd)\n raise\n\n if package_copy_required:\n shutil.copytree(\n self.package_dir, self.build_directory.joinpath(self.package_name)\n )", "def pip_install_req_file(req_file):\n pip_cmd = 'pip install -q --disable-pip-version-check --exists-action w'\n sh(f\"{pip_cmd} 
-r {req_file}\")", "def setup():\n require('hosts', provided_by=[prod])\n require('code_root')\n sudo('apt-get update')\n sudo('apt-get install -y python-setuptools')\n sudo('easy_install pip')\n sudo('pip install virtualenv')\n sudo('aptitude install -y apache2')\n sudo('aptitude install -y libapache2-mod-wsgi')\n sudo('apt-get install -y nginx')\n update_webserver_config()\n sudo('mkdir -p %s; cd %s; virtualenv .;' % (env.code_root, env.code_root))\n sudo('cd %s;mkdir releases; mkdir shared; mkdir packages; mkdir shared/media; mkdir shared/media/file;' % (env.code_root))\n deploy()", "def update_dependencies():\n pip = env.virtualenv.child('bin', 'pip')\n reqs = env.code_dir.child('deploy-requirements.txt')\n sudo('%s -q install -U pip' % pip)\n sudo('%s -q install -r %s' % (pip, reqs))", "def install_requirements(self, rel_path):\n self._log.debug(\"Installing requirements {}\".format(rel_path))\n\n rel_path = rel_path.replace(\"/\", os.path.sep)\n full_path = os.path.join(self._code_dir, rel_path)\n\n with open(full_path, \"rb\") as f:\n data = f.read()\n\n # this takes a fair amount of time sometimes, so if there's an\n # empty requirements.txt file, skip installing it\n actual_req_count = 0\n for line in data.split(\"\\n\"):\n line = line.strip()\n if line == \"\" or line.startswith(\"#\"):\n continue\n actual_req_count += 1\n if actual_req_count == 0:\n self._log.debug(\"Empty requirements.txt, skipping\")\n return\n\n try:\n threading.local().indentation = 0\n pypi_hostname = re.match(r'^.*://([^/]+)/.*$', self.pypi_loc).group(1)\n self._run_pip_main([\n \"install\",\n \"--user\",\n \"--trusted-host\", pypi_hostname,\n \"-i\", self.pypi_loc,\n \"-r\", full_path\n ])\n \n # this is expected - pip.main will *always* exit\n except SystemExit as e:\n # TODO\n raise Exception(\"Is SystemExit normal?\")\n\n threading.local().indentation = 0", "def install_for_spec(self):\n self.create_package_json()\n os.system('npm install json-refs')\n os.system('npm install json2yaml')\n os.system('npm install yamljs')\n os.system('npm install swagger-split') # package only required while splitting hence being installed here\n self.delete_package_json()", "def install(name):\n base = '/home/{}/venvs/{}/base.txt'.format(env.user, name)\n prod = '/home/{}/venvs/{}/prod.txt'.format(env.user, name)\n\n # Upload requirements file.\n put(utils.file_path('requirements', 'base.txt'), base)\n put(utils.file_path('requirements', 'prod.txt'), prod)\n\n # Activate the virtual environment.\n with prefix('source /home/{}/venvs/{}/bin/activate'.format(env.user, name)):\n run('pip install -r {}'.format(prod))", "def requirements(context):\n pip_compile = \"pip-compile --annotate --quiet\"\n\n command = (\n f\"{pip_compile} requirements/base.in \"\n f\"&& {pip_compile} requirements/local.in \"\n f\"&& {pip_compile} requirements/production.in\"\n )\n command = f\"run --rm django bash -c '{command}'\"\n run_command(context, get_local_user(), False, None, None, command)", "def local_install(self):\n import subprocess\n\n print(\"Making local install\")\n from pathlib import Path\n\n root = Path(__file__).parent.parent\n\n def run(args, shell=False):\n print(\"---\", \" \".join(args))\n return subprocess.check_call(args, cwd=curdir, shell=shell)\n\n def get_version():\n import json\n\n p = Path(curdir / \"package.json\")\n contents = json.loads(p.read_text())\n return contents[\"version\"]\n\n print(\"--- installing RobotFramework Language Server\")\n curdir = root / \"robotframework-ls\"\n run(\"python -m dev 
vendor_robocorp_ls_core\".split())\n run(\"vsce package\".split(), shell=sys.platform == \"win32\")\n run(\n f\"code --install-extension robotframework-lsp-{get_version()}.vsix\".split(),\n shell=sys.platform == \"win32\",\n )\n run(\"python -m dev remove_vendor_robocorp_ls_core\".split())\n\n print(\"\\n--- installing Robocorp Code\")\n curdir = root / \"robocorp-code\"\n run(\"python -m dev vendor_robocorp_ls_core\".split())\n run(\"vsce package\".split(), shell=sys.platform == \"win32\")\n run(\n f\"code --install-extension robocorp-code-{get_version()}.vsix\".split(),\n shell=sys.platform == \"win32\",\n )\n run(\"python -m dev remove_vendor_robocorp_ls_core\".split())", "def sync_virtualenv(ctx):\n if not path.isfile(\"./pyenv/bin/pip\"):\n ctx.run(\"virtualenv --no-site-packages --python=/usr/bin/python2.7 pyenv\")\n ctx.run(\"PIP_DOWNLOAD_CACHE=/var/tmp/ ./pyenv/bin/pip install -r requirements.txt\")\n print(\"\"\"\n Installation completed. Please check any error messages above.\n\n If you are going to use `openstack` or ansible directly on the command line, run\n\n . ./pyenv/bin/activate\n\n or even add it to your ~/.bashrc\n \"\"\")", "def build_virtualenv():\n\n puts(yellow(\"Install dependencies from requirements.txt\"))\n with cd(env.source_dir):\n with prefix('source %s' % in_rwd('bin/activate')):\n sudo('pip install -r %s' % env.requirements_file,\n user=env.app_user)\n sudo('python setup.py develop', user=env.app_user)", "def update_requirements():\n with cd(REMOTE_REPO_DIR):\n cmd = ['npm install']\n # cmd += ['--requirement %s' % os.path.join(CODE_DIR,'requirements.txt')]\n run(' '.join(cmd))", "def deploy():\n git_pull()\n if confirm(\"Install/upgrade requirements with pip?\"):\n install_requeriments()\n django_command('collectstatic')\n django_command('migrate')\n restart()", "def set_installed_packages():\n global INSTALLED_PACKAGES, REQUIRED_VERSION\n if INSTALLED_PACKAGES:\n return\n\n if os.path.exists(BIN_PYTHON):\n pip = subprocess.Popen(\n (BIN_PYTHON, '-m', 'pip', 'freeze'),\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE\n )\n (stdout, stderr) = pip.communicate()\n pip.wait()\n\n INSTALLED_PACKAGES = [normalize_package_name(r.decode().split('==')[0].lower()) for r in stdout.split()]\n REQUIRED_VERSION = next((package for package in INSTALLED_PACKAGES if re.match(r'^lore[!<>=]', package)), None)\n if REQUIRED_VERSION:\n REQUIRED_VERSION = re.split(r'[!<>=]', REQUIRED_VERSION)[-1]", "def install_prereqs():\n if no_prereq_install():\n print(NO_PREREQ_MESSAGE)\n return\n\n if not str2bool(os.environ.get('SKIP_NPM_INSTALL', 'False')):\n install_node_prereqs()\n install_python_prereqs()\n log_installed_python_prereqs()\n print_devstack_warning()", "def install_prereqs():\r\n if os.environ.get(\"NO_PREREQ_INSTALL\", False):\r\n return\r\n\r\n prereq_cache(\"Ruby prereqs\", [\"Gemfile\"], install_ruby_prereqs)\r\n prereq_cache(\"Node prereqs\", [\"package.json\"], install_node_prereqs)\r\n prereq_cache(\"Python prereqs\", PYTHON_REQ_FILES + [sysconfig.get_python_lib()], install_python_prereqs)", "def develop():\n dev_packages = [\n 'pytest', 'pytest-xdist', 'pytest-pep8', 'tox', 'httpie'\n ]\n if not path.exists(\"env\"):\n fab.local(\"virtualenv -p /usr/bin/python3 env\")\n fab.local(\"env/bin/pip install --upgrade pip setuptools\")\n fab.local(\"env/bin/python setup.py develop\")\n fab.local(\"env/bin/pip install {}\".format(\" \".join(dev_packages)))", "def install(cmd, reqs, add_to_global=False, prefer_final=True,\n force_upgrade=False, use_existing=False):\n # 
Remove anything we're upgrading\n if force_upgrade:\n uninstall_eggs(reqs)\n\n # Create installer class configured to install into wherever the command\n # class was setup for\n installer = Installer(dest=cmd.install_dir, index=cmd.index_url,\n prefer_final=prefer_final)\n\n # Now apply our runtime additions to its working environment. This includes\n # adding eggs for the egg cache, and removing eggs that we're forcing to be\n # upgraded.\n\n installer.update_search_path()\n\n # This is a bit nasty - we have to monkey-patch the filter for final\n # versions so that we can also filter for dev versions as well.\n\n # Set prefer_final to True, always, so it enables the filter\n easy_install.prefer_final(True)\n\n # This returns a WorkingSet of the packages we just installed.\n ws = None\n if use_existing:\n # NOTE here we pass in the existing stuff - this will prefer installed\n # packages over ones on the server, eg an installed release\n # version won't get trumped by a dev version on the server\n ws = pkg_resources.WorkingSet(pkg_resources.working_set.entries)\n\n # We must remove the package we're installing from the 'baseline' ws.\n # This way we won't get any weird requirement conflicts with new\n # versions of the package we're trying to set up\n if cmd.distribution.metadata.name:\n dist = dependency.get_dist(cmd.distribution.metadata.name)\n if dist:\n dependency.remove_from_ws(ws, dist)\n\n # There's a chance that there were packages in setup_requires that were\n # also in install_requires. Because these are classed as 'already\n # installed' by the installer, they won't have been added to the workingset\n # of packages to set-up in the next step.\n\n # Here we ensure that they're added in along with any of their\n # own dependencies if they are also part of the package install_requires.\n # FIXME: this won't pick up non-direct dependencies.\n # Eg: setup_requires = numpy,\n # install_requires = something that has numpy as a dependency\n def also_required(dist):\n for req in pkg_resources.parse_requirements(reqs):\n if dist in req:\n return True\n return False\n\n setup_dists = [i for i in pkg_resources.working_set.resolve(\n get_setup_requires(cmd.distribution))\n if also_required(i)]\n\n if setup_dists:\n log.debug(\"setup_requires distributions to be set-up:\")\n [log.debug(\" %r\" % i) for i in setup_dists]\n\n # Now run the installer\n try:\n to_setup = installer.install(reqs, working_set=ws,\n use_existing=use_existing)\n except easy_install.MissingDistribution, e:\n log.error(e)\n # TODO: call missing distro hook here\n sys.exit(1)\n\n # Add any of the setup_requires dists to be set-up.\n to_setup = set(to_setup + setup_dists)\n if to_setup:\n log.debug('Packages to set-up:')\n for i in to_setup:\n log.debug(' %r' % i)\n\n # Now we selectively run setuptool's post-install steps.\n # Luckily, the buildout installer didnt strip off any of the useful\n # metadata about the console scripts.\n for dist in to_setup:\n if dist.location.startswith(manage.get_site_packages()):\n fix_permissions(dist)\n cmd.process_distribution(None, dist, deps=False)\n # Add the distributions to the global registry if we asked for it.\n # This makes the distro importable, and classed as 'already\n # installed' by the dependency resolution algorithm.\n if add_to_global:\n pkg_resources.working_set.add(dist)\n else:\n log.debug('Nothing to set-up.')\n return to_setup", "def install(cmd, reqs, add_to_global=False, prefer_final=True,\n force_upgrade=False, use_existing=False):\n # Remove anything we're 
upgrading\n if force_upgrade:\n uninstall_eggs(reqs)\n\n # Create installer class configured to install into wherever the command\n # class was setup for\n installer = Installer(dest=cmd.install_dir, index=cmd.index_url,\n prefer_final=prefer_final)\n\n # Now apply our runtime additions to its working environment. This includes\n # adding eggs for the egg cache, and removing eggs that we're forcing to be\n # upgraded.\n\n installer.update_search_path()\n\n # This is a bit nasty - we have to monkey-patch the filter for final\n # versions so that we can also filter for dev versions as well.\n\n # Set prefer_final to True, always, so it enables the filter\n easy_install.prefer_final(True)\n\n # This returns a WorkingSet of the packages we just installed.\n ws = None\n if use_existing:\n # NOTE here we pass in the existing stuff - this will prefer installed\n # packages over ones on the server, eg an installed release\n # version won't get trumped by a dev version on the server\n ws = pkg_resources.WorkingSet(pkg_resources.working_set.entries)\n\n # We must remove the package we're installing from the 'baseline' ws.\n # This way we won't get any weird requirement conflicts with new\n # versions of the package we're trying to set up\n if cmd.distribution.metadata.name:\n dist = dependency.get_dist(cmd.distribution.metadata.name)\n if dist:\n dependency.remove_from_ws(ws, dist)\n\n # There's a chance that there were packages in setup_requires that were\n # also in install_requires. Because these are classed as 'already\n # installed' by the installer, they won't have been added to the workingset\n # of packages to set-up in the next step.\n\n # Here we ensure that they're added in along with any of their\n # own dependencies if they are also part of the package install_requires.\n # FIXME: this won't pick up non-direct dependencies.\n # Eg: setup_requires = numpy,\n # install_requires = something that has numpy as a dependency\n def also_required(dist):\n for req in pkg_resources.parse_requirements(reqs):\n if dist in req:\n return True\n return False\n\n setup_dists = [i for i in pkg_resources.working_set.resolve(\n get_setup_requires(cmd.distribution))\n if also_required(i)]\n\n if setup_dists:\n log.debug(\"setup_requires distributions to be set-up:\")\n [log.debug(\" %r\" % i) for i in setup_dists]\n\n # Now run the installer\n try:\n to_setup = installer.install(reqs, working_set=ws,\n use_existing=use_existing)\n except easy_install.MissingDistribution, e:\n log.error(e)\n # TODO: call missing distro hook here\n sys.exit(1)\n\n # Add any of the setup_requires dists to be set-up.\n to_setup = set(to_setup + setup_dists)\n if to_setup:\n log.debug('Packages to set-up:')\n for i in to_setup:\n log.debug(' %r' % i)\n\n # Now we selectively run setuptool's post-install steps.\n # Luckily, the buildout installer didnt strip off any of the useful\n # metadata about the console scripts.\n for dist in to_setup:\n if dist.location.startswith(manage.get_site_packages()):\n fix_permissions(dist)\n cmd.process_distribution(None, dist, deps=False)\n # Add the distributions to the global registry if we asked for it.\n # This makes the distro importable, and classed as 'already\n # installed' by the dependency resolution algorithm.\n if add_to_global:\n pkg_resources.working_set.add(dist)\n else:\n log.debug('Nothing to set-up.')\n return to_setup", "def install():\n PackCommandExecutor().pack()\n InstallCommandExecutor().install()", "def install(self) -> None:\n if self.local_packages:\n 
self.prepare_install_local()\n self.install_local()\n if self.remote_packages:\n self.install_from_url()\n if self.repository_packages:\n self.install_from_repository()\n if self.debuginfo_packages:\n self.install_debuginfo()", "def install_pip():\n pip_install_txt = os.path.join(os.path.abspath(os.path.join(__file__, os.pardir)), \"build_test_dependencies.txt\")\n call_subprocess(\"python3 -m pip install -r %s\" % pip_install_txt)\n print(\"Stage install dependencies -- COMPLETED --\")", "def install_requires():\n return reqs(\"requirements.txt\")", "def develop():\n# Install package in development mode\n sh('python setup.py develop')", "def install_requires():\n return reqs('requirements.txt')", "def install():\n return InstallGit()", "def prep_app(build):\n build.packages.install(\"gunicorn\")", "def deploy(force_version=None):\n bundle_name = env.http_host\n bundle_root = '{0}/{1}'.format(\n env.get('bundle_root', run('pwd') + '/bundles'),\n bundle_name,\n )\n env.bundle_root = bundle_root\n run('mkdir -p %s/{log,conf,public}' % bundle_root)\n\n # virtualenv, Packages\n if not exists(bundle_root + '/env'):\n run('virtualenv --no-site-packages {0}/env'.format(bundle_root))\n run('{0}/env/bin/pip install -U pip'.format(bundle_root))\n\n local('python setup.py sdist')\n dists = [\n d for d in os.listdir(os.path.join(os.getcwd(),\n 'dist')) if d.endswith('.tar.gz')\n ]\n version_string = lambda d: d.rsplit('-', 1)[1][:-7]\n\n def int_or_s(num):\n try:\n return int(num)\n except ValueError:\n return num\n dist = sorted(dists, key=lambda d: map(int_or_s,\n version_string(d).split('.')))[-1]\n version = force_version or version_string(dist)\n dist_name = dist.rsplit('-', 1)[0]\n requirement = '{0}=={1}'.format(dist_name, version)\n\n packages = env.bundle_root + '/packages'\n run('mkdir -p {0}'.format(packages))\n if not exists('{0}/{1}'.format(packages, dist)):\n put('dist/{0}'.format(dist), '{0}/{1}'.format(packages, dist))\n\n has_vendor = 'vendor' in os.listdir(os.getcwd())\n if has_vendor:\n local_files = set(os.listdir(os.path.join(os.getcwd(), 'vendor')))\n uploaded = set(run('ls {0}'.format(packages)).split())\n diff = local_files - uploaded\n for file_name in diff:\n put('vendor/{0}'.format(file_name),\n '{0}/{1}'.format(packages, file_name))\n\n freeze = run('{0}/env/bin/pip freeze'.format(bundle_root)).split()\n if requirement in freeze and force_version is None:\n die(\"{0} is already deployed. 
Increment the version number to deploy \"\n \"a new release.\".format(requirement))\n\n cmd = ('{0}/env/bin/pip install -U {1} gunicorn gevent greenlet '\n 'setproctitle --find-links file://{2}'.format(\n bundle_root, requirement, packages,\n ))\n if 'index_url' in env:\n cmd += ' --index-url {0}'.format(env.index_url)\n run(cmd)\n env.path = bundle_root\n\n manage_envdir(bundle_root)\n\n if not 'staticfiles' in env:\n env.staticfiles = True\n if not 'cache' in env:\n env.cache = 0 # redis DB\n\n # Do we have a DB?\n result = run('psql -U postgres -l|grep UTF8')\n if bundle_name not in result:\n if 'gis' in env and env.gis is False:\n db_template = 'template0'\n else:\n db_template = 'template_postgis'\n run('createdb -U postgres -T {0} -E UTF8 {1}').format(db_template,\n bundle_name)\n\n if 'migrations' in env:\n if env.migrations != 'nashvegas':\n die(\"{0} is not supported for migrations.\".format(env.migrations))\n manage('upgradedb -l', noinput=False) # This creates the migration\n # tables\n\n installed = run('psql -U postgres {0} -c \"select id from '\n 'nashvegas_migration limit 1;\"'.format(bundle_name))\n installed = '0 rows' not in installed\n if installed:\n manage('upgradedb -e', noinput=False)\n else:\n # 1st deploy, force syncdb and seed migrations.\n manage('syncdb')\n manage('upgradedb -s', noinput=False)\n else:\n manage('syncdb')\n\n if env.staticfiles:\n manage('collectstatic')\n\n # Some things don't like dots\n env.app = env.http_host.replace('.', '')\n\n # Cron tasks\n if 'cron' in env:\n template('cron', '%(bundle_root)s/conf/cron' % env, use_sudo=True)\n sudo('chown root:root %(bundle_root)s/conf/cron' % env)\n sudo('chmod 644 %(bundle_root)s/conf/cron' % env)\n sudo('ln -sf %(bundle_root)s/conf/cron /etc/cron.d/%(app)s' % env)\n else:\n # Make sure to deactivate tasks if the cron section is removed\n sudo('rm -f %(bundle_root)s/conf/cron /etc/cron.d/%(app)s' % env)\n\n # Log rotation\n logrotate = '/etc/logrotate.d/%(app)s' % env\n template('logrotate', logrotate, use_sudo=True)\n sudo('chown root:root %s' % logrotate)\n\n # Nginx vhost\n changed = template('nginx.conf', '%s/conf/nginx.conf' % bundle_root)\n with cd('/etc/nginx/sites-available'):\n sudo('ln -sf %s/conf/nginx.conf %s.conf' % (bundle_root,\n env.http_host))\n with cd('/etc/nginx/sites-enabled'):\n sudo('ln -sf ../sites-available/%s.conf' % env.http_host)\n if 'ssl_cert' in env and 'ssl_key' in env:\n put(env.ssl_cert, '%s/conf/ssl.crt' % bundle_root)\n put(env.ssl_key, '%s/conf/ssl.key' % bundle_root)\n if changed: # TODO detect if the certs have changed\n sudo('/etc/init.d/nginx reload')\n\n # Supervisor task(s) -- gunicorn + rq\n if not 'workers' in env:\n env.workers = 2\n changed = template('supervisor.conf',\n '%s/conf/supervisor.conf' % bundle_root)\n with cd('/etc/supervisor/conf.d'):\n sudo('ln -sf %s/conf/supervisor.conf %s.conf' % (bundle_root,\n bundle_name))\n\n if 'rq' in env and env.rq:\n changed = True # Always supervisorctl update\n\n # RQ forks processes and they load the latest version of the code.\n # No need to restart the worker **unless** RQ has been updated (TODO).\n for worker_id in range(env.rq['workers']):\n env.worker_id = worker_id\n template(\n 'rq.conf', '%s/conf/rq%s.conf' % (bundle_root, worker_id),\n )\n with cd('/etc/supervisor/conf.d'):\n sudo('ln -sf %s/conf/rq%s.conf %s_worker%s.conf' % (\n bundle_root, worker_id, bundle_name, worker_id,\n ))\n\n # Scale down workers if the number decreased\n names = '/etc/supervisor/conf.d/{0}_worker*.conf'.format(bundle_name)\n 
workers = run('ls {0}'.format(names))\n workers_conf = run('ls {0}/conf/rq*.conf'.format(bundle_root))\n to_delete = []\n for w in workers.split():\n if int(w.split('{0}_worker'.format(bundle_name),\n 1)[1][:-5]) >= env.rq['workers']:\n to_delete.append(w)\n for w in workers_conf.split():\n if int(w.split(bundle_name, 1)[1][8:-5]) >= env.rq['workers']:\n to_delete.append(w)\n if to_delete:\n sudo('rm {0}'.format(\" \".join(to_delete)))\n\n if changed:\n sudo('supervisorctl update')\n run('kill -HUP `pgrep gunicorn`')\n\n # All set, user feedback\n ip = run('curl http://ifconfig.me/')\n dns = run('nslookup {0}'.format(env.http_host))\n if ip in dns:\n proto = 'https' if 'ssl_cert' in env else 'http'\n yay(\"Visit {0}://{1}\".format(proto, env.http_host))\n else:\n err(\"Deployment successful but make sure {0} points to {1}\".format(\n env.http_host, ip))", "def install_deps_temp(self):\n if self.distribution.install_requires:\n self.distribution.fetch_build_eggs(\n self.distribution.install_requires)\n if self.distribution.tests_require:\n self.distribution.fetch_build_eggs(self.distribution.tests_require)", "def setup(app):\n wheel = ensure_wheel()\n subprocess.check_call([\n \"jupyter\", \"lite\", \"build\", f\"--LiteBuildConfig.federated_extensions={wheel}\",\n ], cwd=DEMO)", "def deploy(force_version=None):\n bundle_name = env.http_host\n bundle_root = '%s/%s' % (env.get('bundle_root', run('pwd') + '/bundles'),\n bundle_name)\n env.bundle_root = bundle_root\n run('mkdir -p %s/{log,conf,public}' % bundle_root)\n\n # virtualenv, Packages\n if not exists(bundle_root + '/env'):\n run('virtualenv --no-site-packages %s/env' % bundle_root)\n run('%s/env/bin/pip install -U pip' % bundle_root)\n\n local('python setup.py sdist')\n dists = [\n d for d in os.listdir(os.path.join(os.getcwd(),\n 'dist')) if d.endswith('.tar.gz')\n ]\n version_string = lambda d: d.rsplit('-', 1)[1][:-7]\n def int_or_s(num):\n try:\n return int(num)\n except ValueError:\n return num\n dist = sorted(dists, key=lambda d: map(int_or_s,\n version_string(d).split('.')))[-1]\n version = force_version or version_string(dist)\n dist_name = dist.rsplit('-', 1)[0]\n requirement = '%s==%s' % (dist_name, version)\n\n packages = env.bundle_root + '/packages'\n run('mkdir -p %s' % packages)\n if not exists('%s/%s' % (packages, dist)):\n put('dist/%s' % dist, '%s/%s' % (packages, dist))\n\n has_vendor = 'vendor' in os.listdir(os.getcwd())\n if has_vendor:\n local_files = set(os.listdir(os.path.join(os.getcwd(), 'vendor')))\n uploaded = set(run('ls %s' % packages).split())\n diff = local_files - uploaded\n for file_name in diff:\n put('vendor/%s' % file_name, '%s/%s' % (packages, file_name))\n\n freeze = run('%s/env/bin/pip freeze' % bundle_root).split()\n if requirement in freeze and force_version is None:\n die(\"%s is already deployed. 
Increment the version number to deploy \"\n \"a new release.\" % requirement)\n\n cmd = '%s/env/bin/pip install -U %s gunicorn gevent greenlet setproctitle --find-links file://%s' % (\n bundle_root, requirement, packages\n )\n if 'index_url' in env:\n cmd += ' --index-url %(index_url)s' % env\n run(cmd)\n env.path = bundle_root\n python = run('ls %s/env/lib' % bundle_root)\n template(\n 'path_extension.pth',\n '%s/env/lib/%s/site-packages/_virtualenv_path_extensions.pth' % (\n bundle_root, python\n ),\n )\n\n if 'media_url' not in env:\n env.media_url = '/media/'\n if 'media_root' not in env:\n env.media_root = bundle_root + '/public' + env.media_url\n if 'static_url' not in env:\n env.static_url = '/static/'\n if 'static_root' not in env:\n env.static_root = bundle_root + '/public' + env.static_url\n if not 'staticfiles' in env:\n env.staticfiles = True\n if not 'cache' in env:\n env.cache = 0 # redis DB\n template('settings.py', '%s/settings.py' % bundle_root)\n template('wsgi.py', '%s/wsgi.py' % bundle_root)\n\n # Do we have a DB?\n database_creation()\n database_migration()\n\n if env.staticfiles:\n manage('collectstatic')\n\n # Some things don't like dots\n env.app = env.http_host.replace('.', '')\n\n # Cron tasks\n if 'cron' in env:\n template('cron', '%(bundle_root)s/conf/cron' % env, use_sudo=True)\n sudo('chown root:root %(bundle_root)s/conf/cron' % env)\n sudo('chmod 644 %(bundle_root)s/conf/cron' % env)\n sudo('ln -sf %(bundle_root)s/conf/cron /etc/cron.d/%(app)s' % env)\n else:\n # Make sure to deactivate tasks if the cron section is removed\n sudo('rm -f %(bundle_root)s/conf/cron /etc/cron.d/%(app)s' % env)\n\n # Log rotation\n logrotate = '/etc/logrotate.d/%(app)s' % env\n template('logrotate', logrotate, use_sudo=True)\n sudo('chown root:root %s' % logrotate)\n\n # Nginx vhost\n changed = template('nginx.conf', '%s/conf/nginx.conf' % bundle_root)\n with cd('/etc/nginx/sites-available'):\n sudo('ln -sf %s/conf/nginx.conf %s.conf' % (bundle_root,\n env.http_host))\n with cd('/etc/nginx/sites-enabled'):\n sudo('ln -sf ../sites-available/%s.conf' % env.http_host)\n if env.get('ssl_cert') and env.get('ssl_key'):\n put(env.ssl_cert, '%s/conf/ssl.crt' % bundle_root)\n put(env.ssl_key, '%s/conf/ssl.key' % bundle_root)\n if changed: # TODO detect if the certs have changed\n sudo('/etc/init.d/nginx reload')\n\n # Supervisor task(s) -- gunicorn + rq\n if not 'workers' in env:\n env.workers = 2\n changed = template('supervisor.conf',\n '%s/conf/supervisor.conf' % bundle_root)\n with cd('/etc/supervisor/conf.d'):\n sudo('ln -sf %s/conf/supervisor.conf %s.conf' % (bundle_root,\n bundle_name))\n\n if 'rq' in env and env.rq:\n changed = True # Always supervisorctl update\n handle_rq(bundle_name, bundle_root, env)\n\n if 'celery' in env and env.celery:\n changed = True\n handle_celery(bundle_name, bundle_root, env)\n\n if changed:\n sudo('supervisorctl update')\n run('kill -HUP `pgrep gunicorn`')\n\n # All set, user feedback\n ip = run('curl http://ifconfig.me/')\n dns = run('nslookup %s' % env.http_host)\n if ip in dns:\n proto = 'https' if 'ssl_cert' in env else 'http'\n yay(\"Visit %s://%s\" % (proto, env.http_host))\n else:\n err(\"Deployment successful but make sure %s points to %s\" % (\n env.http_host, ip))", "def install_strict_dependencies():\n strict_requirements_file = pkg_resources.resource_filename(\n \"starfish\", \"REQUIREMENTS-STRICT.txt\")\n subprocess.check_call([\n sys.executable, \"-m\", \"pip\", \"install\", \"-r\", strict_requirements_file\n ])", "def 
test_pip_install_with_extra_pypi_servers(self):\n package_manager.install_lib('foo')\n self.assertEqual(run_commands(), [\n 'pip install -E %s -r %s --extra-index-url=%s --extra-index-url=%s' % (\n self.test_env,\n self.req_path,\n 'http://localhost:8000/simple',\n 'http://pypi.internal.com/simple'\n )\n ])", "def update_requirements():\n\n check_prompt = (\n not env.prompt or\n console.confirm(\n \"Update virtualenv requirements based on requirements.txt file?\",\n default=True,\n )\n )\n\n if check_prompt:\n with cd(\"%s\" % env.repo_path):\n with prefix(\"source %s/bin/activate\" % env.env_path):\n run(\n \"pip install\"\n \" --requirement %s/requirements.txt\" % env.repo_path\n )", "def install():\n remote_egg_path = os.path.join(remote_egg_dir, get_egg_name())\n sudo('easy_install -U %s' % remote_egg_path)\n sudo('rm %s' % remote_egg_path)", "def install(self, egg, dir_path):", "def pip_install_requirements(virtualenv_path, requirements_path, cache_path, log_path):\n\n requirements_file = os.path.join(requirements_path, 'requirements.txt')\n log_file = os.path.join(log_path, 'pip.log')\n\n if not exists(requirements_file) or not exists(virtualenv_path):\n abort(red('Could not install packages. Virtual environment or requirements.txt not found.'))\n\n args = (virtualenv_path, requirements_file, cache_path, log_file)\n run('%s/bin/pip install -r %s --download-cache=%s --use-mirrors --quiet --log=%s' % args)", "def install():\n return {\n \"actions\": [TaskCreator.get_pip() + \" install --upgrade dist/*.whl\"],\n \"verbosity\": 2,\n \"setup\": [\"make_distribution\"],\n }", "def install(templatedir=DEFAULT_TEMPLATE_DIR, mediadir=DEFAULT_MEDIA_DIR,\n staticdir=DEFAULT_STATIC_DIR, runserverpath=DEFAULT_RUNSERVER_PATH,\n envdir=DEFAULT_ENV_DIR, django='1.3'):\n install_virtualenv(envdir)\n install_django(envdir, django)\n install_runserver(envdir, runserverpath, templatedir, mediadir, staticdir)", "def prod_server():\n sh(\"bin/pip freeze -r requirements.txt production/requirements.txt\")", "def install():\n execute(generate)\n execute(upload)", "def pip_install(*args):\n call(WITH_VENV, '.venv', 'pip', 'install', *args)", "def prepare():\n sh('pip install pylint pyflakes behave nose clonedigger pep8 sphinx')\n sh('pip install watchdog coverage ipython sphinx_rtd_theme')\n develop()", "def test_install(ctx):\n ctx.run(\"pip uninstall {PROJECT_NAME} --yes\".format(PROJECT_NAME=PROJECT_NAME), warn=True)\n ctx.run(\"pip install --no-cache-dir --no-index --find-links=file:./dist {PROJECT_NAME}\".format(PROJECT_NAME=PROJECT_NAME))\n ctx.run(\"pip uninstall {PROJECT_NAME} --yes\".format(PROJECT_NAME=PROJECT_NAME))", "def installRequiredPackages(self, force=False):\n # Need to install if forced or any packages cannot be imported\n needToInstall = force\n if not needToInstall:\n try:\n import jupyter\n import jupyterlab\n import ipywidgets\n import pandas\n import ipyevents\n import ipycanvas\n except:\n needToInstall = True\n\n if needToInstall:\n # Install required packages\n import os\n if os.name != 'nt':\n # PIL may be corrupted on linux, reinstall from pillow\n slicer.util.pip_install('--upgrade pillow --force-reinstall')\n\n slicer.util.pip_install(\"jupyter jupyterlab ipywidgets pandas ipyevents ipycanvas --no-warn-script-location\")\n\n # Install Slicer Jupyter kernel\n # Create Slicer kernel\n slicer.modules.jupyterkernel.updateKernelSpec()\n # Install Slicer kernel\n import jupyter_client\n 
jupyter_client.kernelspec.KernelSpecManager().install_kernel_spec(slicer.modules.jupyterkernel.kernelSpecPath(), user=True, replace=True)", "def _pip_install_requirements(\n install_path: str, requirements_path: str\n) -> None:\n verify_pip_is_installed()\n _run_pip_command([\n 'install', '--require-hashes', '--no-deps', '--target',\n install_path, '--no-dependencies', '-r', requirements_path, '--upgrade'\n ])", "def install_local(self) -> None:\n pass", "def pip_packages():\n packages = reduce(lambda a, x: \"%s %s\" % (a, x), PIP_PACKAGES, '')\n sudo(\"pip install %s &> /dev/null\" % packages)", "def texlive_install_deps():\n print('Installing dependencies...')\n subprocess.run([\"tlmgr\", \"install\"] + read_deps())\n print('Dependencies installed')", "def _install_direct_url(library_name: str, direct_url: str) -> None:\n pip_install(\n '%s#egg=%s' % (direct_url, library_name),\n common.THIRD_PARTY_PYTHON_LIBS_DIR,\n upgrade=True,\n no_dependencies=True)", "def setup():\n _confirm_branch()\n \n require('settings', provided_by=[production, staging])\n require('branch', provided_by=[stable, master, branch])\n \n setup_directories()\n setup_virtualenv()\n clone_repo()\n checkout_latest()\n install_requirements()\n install_apache_conf()\n deploy_to_s3()", "def install_requirement(req,\n path=None,\n extra_site_dirs=[],\n index='http://pypi.python.org/simple',\n repositories=['http://pypi.python.org/simple'],\n interpreter=PythonInterpreter.get()):\n\n # TODO(wickman) Consider importing the easy_install Command class directly and\n # manipulating it with initialize/finalize options + run.\n\n if not isinstance(req, pkg_resources.Requirement):\n if not os.path.exists(req):\n try:\n req = pkg_resources.Requirement.parse(req)\n except:\n raise TypeError(\n \"req should either be an installable file, a pkg_resources.Requirement \"\n \"or a valid requirement string. 
got %s\" % req)\n\n if path is None:\n path = tempfile.mkdtemp()\n\n if not os.path.exists(path):\n safe_mkdir(path)\n\n easy_install_args = [\n '--install-dir=%s' % path,\n '--site-dirs=%s' % ','.join([path] + extra_site_dirs),\n '--always-copy',\n '--multi-version',\n '--exclude-scripts',\n '-i', index]\n for repo in reversed(repositories):\n easy_install_args.extend(['-f', repo])\n easy_install_args.append(str(req))\n\n distributions_backup = set(pkg_resources.find_distributions(path))\n\n rc = ReqBuilder.run_easy_install([path] + extra_site_dirs + sys.path,\n easy_install_args, interpreter)\n\n distributions = set(pkg_resources.find_distributions(path))\n new_distributions = distributions - distributions_backup\n return new_distributions if rc else set()", "def update_requirements():\n\n require('code_root', provided_by=env.environments)\n requirements = os.path.join(env.code_root, 'requirements')\n sdists = os.path.join(requirements, 'sdists')\n base_cmd = ['pip install']\n base_cmd += ['-q -E %(virtualenv_root)s' % env]\n base_cmd += ['--no-index --find-links=file://%s' % sdists]\n # install GDAL by hand, before anything else that might depend on it\n cmd = base_cmd + ['--no-install \"GDAL==1.6.1\"']\n sudo(' '.join(cmd), user=env.deploy_user)\n # this directory won't exist if GDAL was already installed\n if files.exists('%(virtualenv_root)s/build/GDAL' % env):\n sudo('rm -f %(virtualenv_root)s/build/GDAL/setup.cfg' % env, user=env.deploy_user)\n with cd('%(virtualenv_root)s/build/GDAL' % env):\n sudo('%(virtualenv_root)s/bin/python setup.py build_ext '\n '--gdal-config=gdal-config '\n '--library-dirs=/usr/lib '\n '--libraries=gdal1.6.0 '\n '--include-dirs=/usr/include/gdal '\n 'install' % env, user=env.deploy_user)\n # force reinstallation of OpenBlock every time\n with settings(warn_only=True):\n sudo('pip uninstall -y -E %(virtualenv_root)s ebpub ebdata obadmin' % env)\n for file_name in ['ebpub.txt', 'ebdata.txt', 'obadmin.txt', 'openrural.txt']:\n apps = os.path.join(requirements, file_name)\n cmd = base_cmd + ['--requirement %s' % apps]\n sudo(' '.join(cmd), user=env.deploy_user)", "def git_install(projects_yaml):\n if git_install_requested():\n git_pre_install()\n projects_yaml = git_default_repos(projects_yaml)\n git_clone_and_install(projects_yaml, core_project='keystone')\n git_post_install(projects_yaml)", "def pre_install(self, installable_pkgs):\n pass", "def pre_installation(self):\n pass", "def install(package):\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", package])", "def _install(self):\n\n pass", "def install_requires():\n skip_install_requires = environ.get('SKIP_INSTALL_REQUIRES')\n if not skip_install_requires:\n with open('requirements.pip') as r:\n return r.readlines()\n return []" ]
[ "0.7519722", "0.7148218", "0.7090568", "0.7072766", "0.6987992", "0.6935873", "0.69101477", "0.6883953", "0.68306017", "0.6790433", "0.6782558", "0.67806673", "0.6776519", "0.67606586", "0.6741522", "0.67095673", "0.67082644", "0.6687862", "0.6645893", "0.6642955", "0.65967375", "0.6548646", "0.65178996", "0.64615154", "0.64350545", "0.64348435", "0.642677", "0.6425316", "0.63632506", "0.6350534", "0.6328363", "0.6321842", "0.63034034", "0.6285892", "0.62820554", "0.62800497", "0.6273809", "0.6238121", "0.62227887", "0.62003964", "0.6199523", "0.6183014", "0.61751467", "0.6167576", "0.615885", "0.6146018", "0.6140471", "0.61324435", "0.61197734", "0.609775", "0.6057677", "0.6044996", "0.6042461", "0.603341", "0.6015658", "0.6004895", "0.5964689", "0.5953387", "0.59249336", "0.59249336", "0.5875521", "0.5860852", "0.5858874", "0.5837615", "0.58211476", "0.58197886", "0.58155817", "0.5805345", "0.5796538", "0.57895136", "0.57888186", "0.5785594", "0.5774837", "0.5766973", "0.5760013", "0.5757526", "0.5752169", "0.5751284", "0.5740935", "0.57407564", "0.57314014", "0.57198584", "0.5712447", "0.57076156", "0.5707356", "0.5676993", "0.567556", "0.5670018", "0.5669826", "0.5668248", "0.5662385", "0.5655842", "0.5645204", "0.5642401", "0.5622438", "0.5620657", "0.56199855", "0.56196624", "0.5618356", "0.56150794" ]
0.6606969
20
Initialises the database to contain the tables required for DjangoCMS with South. Runs syncdb --all and migrate --fake.
def initialise_database():
    with cd(code_dir):
        run(python_add_str + "python manage.py syncdb --all")
        run(python_add_str + "python manage.py migrate --fake")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup_database():\n from django.core.management import call_command\n from django import setup\n setup()\n call_command('migrate', verbosity=0, interactive=False)\n call_command('loaddata', data('initial_data.json'), verbosity=0, interactive=False)", "def init_db():\n db = get_db()\n Page.create_table(db)\n PageVersion.create_table(db)\n User.create_table(db)", "def setup_database(self):\n self.db.setup_database()", "def _init_db(self):\n cursor = self._main_connection.cursor()\n cursor.execute(self.sql[\"create_table\"])\n self._main_connection.commit()", "def setup_db(self) -> None:\n conn = mysql.connector.connect(\n user=self.app.config[\"DATABASE_USER\"], password=self.app.config[\"DATABASE_PASSWORD\"],\n host=self.app.config[\"DATABASE_HOST\"], port=self.app.config[\"DATABASE_PORT\"], raise_on_warnings=True\n )\n try:\n cursor = conn.cursor()\n cursor.execute(\n \"CREATE DATABASE IF NOT EXISTS {} CHARACTER SET utf8\".format(self.app.config[\"DATABASE_NAME\"])\n )\n conn.commit()\n except:\n raise\n else:\n with self.DBManager(self.app) as connection:\n for model in sorted(lib.get_subclasses(lib.models.Model), key=lambda x: x.index):\n model.setup_table(connection=connection)\n finally:\n conn.close()", "def initialize_test_db(self):\n # Create a test database and sync it with models.py\n # Handle a second test database for selenium use. Postgres uses\n # transactions which interfere with the Django server thread.\n settings.TEST_DATABASE_NAME = self.db_name\n connection.creation.create_test_db(verbosity=self.verbosity,\n autoclobber=True)\n # Hook for doing any extra initialization\n self.extra_init()\n # Load fixture data.\n call_command('loaddata', *self.fixtures, verbosity=self.verbosity)\n # Sync data and close connection\n connection.close()\n # If sqlite3 or Postgres is used, create a backup database to speed up\n # fixture reloading.\n if settings.DATABASE_ENGINE == 'postgresql_psycopg2':\n # connection.creation is used to overcome transaction management,\n # allowing to execute DROP and CREATE db commands.\n cursor = connection.cursor()\n connection.creation.set_autocommit()\n cursor.execute(\"DROP DATABASE IF EXISTS %s_backup\" % self.db_name)\n cursor.execute(\"CREATE DATABASE %s_backup WITH TEMPLATE %s\" % (\n self.db_name, self.db_name))\n if settings.DATABASE_ENGINE == 'sqlite3':\n self.db_path = os.path.join(PROJECT_PATH, settings.DATABASE_NAME)\n self.db_backup_path = '%s_backup' % self.db_path\n if self.db_path[-3:] == '.db':\n self.db_backup_path = '%s_backup.db' % self.db_path[:-3]\n shutil.copyfile(self.db_path, self.db_backup_path)\n # Restore the database names as create_test_db changed it.\n settings.TEST_DATABASE_NAME = self.test_database_name\n settings.DATABASE_NAME = self.database_name", "def init_db():\n db.drop_all()\n db.create_all()\n seed_companies()\n seed_emission_reports()\n seed_reduction_targets()\n seed_milestones()", "def set_up_db():\n DATABASE.drop_tables([Customer])\n DATABASE.close()\n DATABASE.create_tables([Customer])\n DATABASE.close()", "def init_db():\n import cerbereapp.models\n Base.metadata.create_all(bind=engine)", "def __init_database(self):\n from admin.database import init_db\n init_db()", "def django_db_setup(django_db_setup, django_db_blocker):\n with django_db_blocker.unblock():\n # todo Now remove the --noinput just to be sure that the test database's data will be deleted\n management.call_command('flush', '--noinput')\n zakanda.db.create_initial_data()", "def initialize():\n DATABASE.connect()\n 
DATABASE.create_tables([User, Entry], safe=True)\n DATABASE.close()", "def setup_db():\n logger.info('Setting up db')\n setup_all_db()\n setup_emails()", "def migrate_database(self):\n\n self.db.migrate_database()", "def db_initialise():\n generate_migration_file()\n if not MySQLScheme.fetch_one(IS_MIGRATION_TABLE,\n **{\"args\": {'schema': SCHEMA}}):\n with open(MIGRATION_FILE, 'r') as init_sql:\n data = init_sql.read()\n\n if f\"CREATE TABLE IF NOT EXISTS {MIGRATION_TABLE}\" not in data:\n when = str(int(time.time()))\n sql_file = os.path.join(MIGRATION_FOLDER, f\"{when}.sql\")\n\n with open(sql_file, 'w') as save_sql:\n up = MYSQL_MIGRATION_UP.format(f\"upgrade-{when}\", when,\n MIGRATION_TABLE)\n down = MYSQL_MIGRATION_DOWN.format(f\"downgrade-{when}\",\n MIGRATION_TABLE)\n\n save_sql.write(\"\\n\\n\".join([up, down]))\n LOGGER.info(f\"migration file: \"\n f\"{os.path.join('migrations', sql_file)}\")\n else:\n when = re.findall('[0-9]+', data)[0]\n\n generate_migration_file()\n dbi_query = anosql.from_path(MIGRATION_FILE, 'psycopg2')\n MySQLScheme.commit(getattr(dbi_query, f\"upgrade_{when}\").sql)\n LOGGER.info(f\"initial successful migration: {when}\")", "def _initial_setup(self):\n logger.info(\"Performing initial database setup...\")\n\n # Set up the migration_version table\n self._execute(\n \"\"\"\n CREATE TABLE migration_version (\n version INTEGER PRIMARY KEY\n )\n \"\"\"\n )\n\n # Initially set the migration version to 0\n self._execute(\n \"\"\"\n INSERT INTO migration_version (\n version\n ) VALUES (?)\n \"\"\",\n (0,),\n )\n\n # Set up any other necessary database tables here\n\n logger.info(\"Database setup complete\")", "def syncdb():\n with virtualenv():\n run('python manage.py syncdb --noinput')\n run('python manage.py migrate')", "def setUp(self):\n db.create_all()", "def initdb():\n db.create_all()", "def initdb():\n db.create_all()", "def setup_db():\n\n engine = config['tg.app_globals'].sa_engine\n # model.init_model(engine)\n # model.metadata.create_all(engine)", "def initialize():\n DATABASE.connect()\n DATABASE.create_tables([User], safe=True)\n DATABASE.close()", "def init_db():\n current_app.logger.info('Creating database...')\n db.drop_all()\n db.create_all()\n db.session.commit()", "def smart_syncdb_migrate(self):\n local('python manage.py syncdb')\n local('python manage.py migrate')\n local('python manage.py syncdb --all')", "def init_db():\n # We are setting the module variables here for the first time, so disable the warning\n global DB_USER_TABLE # pylint: disable=global-variable-undefined\n global DB_CUSTOMER_TABLE # pylint: disable=global-variable-undefined\n global DB_USER_CUSTOMER_RELS_TABLE # pylint: disable=global-variable-undefined\n global DB_TICKET_TABLE # pylint: disable=global-variable-undefined\n global DB_COMMENT_TABLE # pylint: disable=global-variable-undefined\n\n db = TinyDB(app.config['DB_NAME'])\n\n DB_USER_TABLE = db.table('users')\n DB_CUSTOMER_TABLE = db.table('customers')\n DB_USER_CUSTOMER_RELS_TABLE = db.table('user_customer_rels')\n DB_TICKET_TABLE = db.table('tickets')\n DB_COMMENT_TABLE = db.table('comments')", "def setUp(self):\n db.drop_all() # clean up the last tests\n db.create_all() # make our sqlalchemy tables", "def init():\n database.create_tables([Tracker])\n database.commit()", "def init_db():\n db.drop_all()\n db.configure_mappers()\n db.create_all()\n db.session.commit()", "def initdb():\n db.drop_all()\n db.configure_mappers()\n db.create_all()\n db.session.commit()", "def sync_db():\n\n check_prompt = (\n not 
env.prompt or\n console.confirm(\n \"Create tables for models which have not yet been installed?\",\n default=True,\n )\n )\n\n if check_prompt:\n with cd(\"%s\" % env.work_path):\n with prefix(\"source %s/bin/activate\" % env.env_path):\n run(\n \"./manage.py syncdb\"\n \" --noinput\"\n )", "def initialize_db(self) -> None:\n if not self.check_schema_initialized():\n self._create_genes_table()\n self._create_meta_data_table()", "def init_db():\n\tdb.drop_all()\n\tdb.create_all()\n\n\tprint(\"Initialized Database.\")\n\treturn", "def setup_db(app):\n db.app = app\n Migrate(app, db)\n db.init_app(app)", "def initialize():\n db.connect()\n db.create_tables([Entry], safe=True)", "def initialize():\n db.connect()\n db.create_tables([Entry], safe=True)", "def init_db() -> None: \n \n Base.metadata.create_all(bind=engine)", "def django_db_setup(django_db_blocker):\n settings.DATABASES['default'] = settings.DATABASES['default']", "def setup(self):\n #print \"Creating test database...\"\n files = glob.glob(os.path.join(self.home_dir, 'sqlFiles', '*.sql'))\n for fls in files:\n loc = fls.rfind('/')\n #print(\" \" + fls.replace('.sql', '')[loc + 1:])\n flh = open(fls, 'r')\n curs = self.cursor()\n curs.executescript(flh.read())\n self.commit()\n curs.close()\n flh.close()\n for fls in ['INSERTS', 'TRIGGERS']:\n #print(fls)\n flh = open(os.path.join(self.home_dir, 'sqlFiles', fls), 'r')\n curs = self.cursor()\n curs.executescript(flh.read())\n self.commit()\n curs.close()\n flh.close()", "def initdb():\n db = getdb()\n\n with open(os.path.join(config.BASE_DIRECTORY, 'schema.sql')) as f:\n db.executescript(f.read())", "def migrate_db():\n Base.metadata.create_all(ENGINE)", "def create_db():\n database.db.create_all()\n get_ulm()\n for fixture_file in glob.glob(config.DevelopmentConfig.FIXTURES_DIRS + '/*.json'):\n fixtures = JSONLoader().load(fixture_file)\n load_fixtures(database.db, fixtures)\n MigrationManager().stamp_db()", "def initialize():\n DATABASE.connect()\n DATABASE.drop_tables([Journal], safe=True)\n DATABASE.create_tables([Journal], safe=True)\n DATABASE.close()", "def init_db():\n # Open connection to the database\n conn = sqlite3.connect(DB_PATH)\n cursor = conn.cursor()\n\n # Open the schema file and execute its SQL code\n with current_app.open_resource('schema.sql') as db_schema:\n cursor.executescript(db_schema.read().decode('utf8'))\n\n # Save (commit) the changes\n conn.commit()\n\n # We can also close the connection if we are done with it.\n conn.close()", "def init(self):\n self.db.connect()\n try:\n self.db.create_tables([JambiModel], safe=True)\n JambiModel.create(ref='0')\n self.logger.info('Database initialized')\n except IntegrityError:\n self.logger.info('Database was already initialized')\n self.db.close()", "def init_database(self):\n init_database(self.engine)", "def init_db():\n db.drop_all()\n db.create_all()\n\n print(\"Initialized Connect 4 Database.\")", "def _initDb(self):\n CREATE_TOKEN_TABLE = '''create table token\n (token text, id int primary key)\n '''\n CREATE_DOCS_TABLE = '''create table docs\n (local_path text, resource_id text primary key, etag text, title text)\n '''\n \n try:\n self.db.execute(CREATE_TOKEN_TABLE)\n self.db.execute(CREATE_DOCS_TABLE)\n except sqlite3.OperationalError, error:\n pass", "def init_db():\n # users table\n cur.execute(\n \"CREATE TABLE IF NOT EXISTS users (\"\n \"id INTEGER PRIMARY KEY AUTO_INCREMENT,\"\n \"name VARCHAR(255) NOT NULL,\"\n \"email VARCHAR(255) NOT NULL,\"\n \"password VARCHAR(30) NOT NULL,\"\n \"birthdate 
DATE);\"\n )\n\n # users' phone records table\n cur.execute(\"CREATE TABLE IF NOT EXISTS records (\"\n \"id INTEGER PRIMARY KEY AUTO_INCREMENT,\"\n \"ownerID INTEGER,\"\n \"name VARCHAR(255),\"\n \"phone VARCHAR(22),\"\n \"birthdate DATE);\")", "def init_database():\n database.init(DATABASE_NAME)\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON')\n if not database.table_exists([Customer]):\n database.create_tables([Customer])\n database.close()", "def setUp(self) -> None:\n sqlite_db = setup_sqlite_in_memory_db()\n sqlite_db.create_tables([\n Schedule,\n Destination\n ])\n self.schedule_factory = ScheduleFactory()", "def setUp(self):\n create_table(self.DATABASE_PATH)\n self.model = model.CodeReviewDatabase(self.DATABASE_PATH)", "def init_database(self):\n # init_database(self.engine)", "def create_database(self):\n # FIXME(jlvillal) to work with Ironic\n raise Exception(\"Does not work\")\n\n if self.needs_database:\n conf_dir = os.path.join(self.test_dir, 'etc')\n safe_mkdirs(conf_dir)\n conf_filepath = os.path.join(conf_dir, 'ironic-manage.conf')\n\n with open(conf_filepath, 'w') as conf_file:\n conf_file.write('[DEFAULT]\\n')\n conf_file.write('sql_connection = %s' % self.sql_connection)\n conf_file.flush()\n\n ironic_db_env = 'IRONIC_DB_TEST_SQLITE_FILE'\n if ironic_db_env in os.environ:\n # use the empty db created and cached as a tempfile\n # instead of spending the time creating a new one\n db_location = os.environ[ironic_db_env]\n os.system('cp %s %s/tests.sqlite' %\n (db_location, self.test_dir))\n else:\n # FIXME(jlvillal) what is the correct command????\n cmd = ('%s -m ironic.cmd.manage --config-file %s db sync' %\n (sys.executable, conf_filepath))\n utils.execute(cmd)\n\n # copy the clean db to a temp location so that it\n # can be reused for future tests\n (osf, db_location) = tempfile.mkstemp()\n os.close(osf)\n os.system('cp %s/tests.sqlite %s' %\n (self.test_dir, db_location))\n os.environ[ironic_db_env] = db_location\n\n # cleanup the temp file when the test suite is\n # complete\n def _delete_cached_db():\n try:\n os.remove(os.environ[ironic_db_env])\n except Exception:\n # FIXME(jlvillal) We should log this\n raise NotImplementedError\n # logger.exception(\n # \"Error cleaning up the file %s\" %\n # os.environ[ironic_db_env])\n\n atexit.register(_delete_cached_db)", "def init_db():\n\twith closing(connect_db()) as db:\n\t\twith app.open_resource('schema.sql', mode='r') as f:\n\t\t\tdb.cursor().executescript(f.read())\n\t\tdb.commit()", "def init_db():\n with app.app_context():\n db = connect_db()\n with app.open_resource('schema.sql') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def initialize_empty_database(self):\r\n Base.metadata.create_all(self.engine)", "def init_db():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db(base):\n base.metadata.create_all(engine, checkfirst=True)", "def init_db():\n engine = create_engine(SQLALCHEMY_ENGINE_STR)\n Base.metadata.drop_all(bind=engine)\n Base.metadata.create_all(bind=engine)", "def setUp(self):\n db.drop_all() # clean up the last tests\n db.create_all() # create new tables\n self.app = app.test_client()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def setUp(self):\n db.drop_all() # clean up the last tests\n db.create_all() # create new tables\n self.app = app.test_client()", 
"def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def init_db():\n with app.app_context():\n db = get_db()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def _initialize_db():\n conn, c = _get_db_connection()\n\n with open(str(SCHEMA_PATH)) as f:\n c.executescript(f.read())\n\n conn.close()", "def setUp(self):\n self.app = create_app()\n self.client = self.app.test_client\n setup_db(self.app, TEST_DB_PATH)\n\n # binds the app to the current context\n with self.app.app_context():\n # create all tables\n db.create_all()\n self._populate_db()", "def init_db():\n with closing(connect_db()) as db:\n with app.open_resource('schema.sql') as fobj:\n db.cursor().executescript(fobj.read())\n db.commit()", "def init_db_command():\n db.create_all()\n Contact.as_unique(db.session, email='[email protected]', name='No Name')\n ac = Account.as_unique(db.session, email='[email protected]',\n nickname='no.name')\n db.session.flush()\n Thread.as_unique(db.session, account=ac, thread_id='No thread')\n db.session.commit()\n click.echo('Initialized the database.')", "def init_db():\n db = get_db()\n with current_app.open_resource('schema.sql') as f:\n db.executescript(f.read().decode('utf8'))", "def initialize():\n\n db.connect() # Se conecta\n db.create_tables([Entry], safe=True) # Crea las tablas\n # safe=true evita crear modelos ya creados", "def create_db():\n _init_db()\n db.create_all()", "def init_database(db: sa.engine.Connectable):\n\n # setup the Postgres extensions and schema\n db.execute(\"\"\"\n CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\" WITH SCHEMA public;\n \"\"\")\n db.execute(\n ';\\n'.join(\n 'CREATE SCHEMA IF NOT EXISTS {}'.format(s) for s in SCHEMAS.values()\n )\n )\n\n # create the schema from the models\n METADATA.create_all(bind=db)", "def init_db():\n db = get_db()\n\n with current_app.open_resource(\"schema.sql\") as f:\n db.executescript(f.read().decode(\"utf8\"))", "def insert_db():\n populate_tables()", "def migrate():\n puts(yellow(\"Run South migrations\"))\n django_manage('migrate')", "def init_database(self):\n engine = create_engine('sqlite:///todo.db?check_same_thread=False')\n self.Base.metadata.create_all(engine)\n self.session = sessionmaker(bind=engine)()", "def create_database():\n with connection:\n connection.execute(CREATE_MOVIE_TABLE)\n connection.execute(CREATE_USER_TABLE)\n connection.execute(CREATE_WATCHED_TABLE)", "def init_db(self):\n\n # The user can provide a custom string\n if self.database is None:\n self.logger.error(\"You must provide a database url, 
exiting.\")\n sys.exit(1)\n\n self.engine = create_engine(self.database, convert_unicode=True)\n self.session = scoped_session(\n sessionmaker(autocommit=False, autoflush=False, bind=self.engine)\n )\n\n # Database Setup\n Base.query = self.session.query_property()\n\n # import all modules here that might define models so that\n # they will be registered properly on the metadata. Otherwise\n # you will have to import them first before calling init_db()\n import expfactory.database.models\n\n self.Base = Base\n self.Base.metadata.create_all(bind=self.engine)", "def initialise(self):\n\n if self.db_type == 'sqlite':\n try:\n # Attempt to create schema if not present, to cope with fresh DB file\n BaseSQLite.metadata.create_all(self.engine)\n except OperationalError:\n print(\"Error creating database schema, possible invalid path? ('\" + self.db_name + \"'). Quitting\")\n exit()\n elif self.db_type == 'postgres':\n try:\n # ensure that create schema scripts created before create table scripts\n event.listen(BasePostgres.metadata, 'before_create', CreateSchema('datastore_schema'))\n BasePostgres.metadata.create_all(self.engine)\n except OperationalError:\n print(f\"Error creating database({self.db_name})! Quitting\")\n exit()", "def syncdb():\n\n require(\"virtualenv_path\", \"project_path\", \"sudo_user\")\n utils.django_manage_run(\n env.virtualenv_path,\n env.project_path,\n \"syncdb\",\n env.sudo_user,\n )", "async def prepare_databases(self):", "def setUp(self):\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = \"Capstone\"\n self.database_path = \"postgres://{}/{}\".format('localhost:5432', self.database_name)\n setup_db(self.app , self.database_path)\n\n # binds the app to the current context\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n # create all tables\n\n self.db.create_all()", "def syncdb():\n run('source %s/bin/activate' % env.virtualenv_root)\n run('%s/mwana/manage.py syncdb' % env.code_root)", "def initdb_command():\n db.drop_all()\n db.create_all()\n if LOAD_DUMMY_DATA:\n setup_dummy_data()\n\n print('Initialized the database.')", "def setUp(self):\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = \"capstone_test\"\n self.database_path = \"postgres://postgres:0000@{}/{}\".format(\n 'localhost:5432', self.database_name)\n setup_db(self.app, self.database_path)\n # binds the app to the current context\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n # create all tables\n self.db.drop_all()\n self.db.create_all()", "def setUp(self):\n INFLUX_DB_NAME = 'test_device_parameters'\n EmptyDBTestCase.client.create_database(INFLUX_DB_NAME)\n EmptyDBTestCase.client.drop_database(INFLUX_DB_NAME)\n EmptyDBTestCase.client.create_database(INFLUX_DB_NAME)", "def _db_setup(self):\n self.get_connection()\n sql_file = open(db_config.DATABASE_TABLES_SETUP_FILE, 'r')\n with self.conn.cursor() as cur:\n cur.execute(sql_file.read())\n self.conn.commit()\n logger.info(f'The script {db_config.DATABASE_TABLES_SETUP_FILE} has run.')", "def make_db():\n\n db.create_all()", "def initDB():\n global DATABASE\n\n uid0 = generate_resource_uid('Admin1', 0)\n\n DATABASE[\"users\"] = {\n \"Admin1\": {\n \"Type\": \"admin\",\n \"Password\": \"AdminPass\",\n \"Quota\": int(sys.maxsize),\n \"Resources\": {uid0},\n \"Created\": 1,\n },\n \"User1\": {\n \"Type\": \"user\",\n \"Password\": \"UserPass\",\n \"Quota\": int(sys.maxsize),\n \"Resources\": set([]),\n 
\"Created\": 0,\n }\n }\n\n DATABASE[\"resources\"] = {\n uid0: \"Admin1\",\n }", "def setUp(self):\n\n app.config.from_object(config['testing'])\n db.create_all()", "def init_db(ctx: click.core.Context, force: bool) -> None:\n engine = ctx.obj['engine']\n if force:\n contract.Base.metadata.drop_all(engine)\n contract.Base.metadata.create_all(engine)", "def init_db():\n with LoggerApi.app_context():\n db = get_db()\n with LoggerApi.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def initialize():\n db.connect()\n db.create_tables([Expense], safe=True)", "def update_db():\r\n settings = getattr(options, 'settings', 'dev')\r\n sh(django_cmd('lms', settings, 'syncdb', '--traceback', '--pythonpath=.'))\r\n sh(django_cmd('lms', settings, 'migrate', '--traceback', '--pythonpath=.'))", "def setup_tables(self):\n try:\n self.cursor.execute('CREATE SCHEMA sandbox')\n self.cursor.execute(\"DROP TABLE sandbox.dvds_rdbhdb_super;\")\n except (db.ProgrammingError, db.OperationalError), e:\n # sandbox may not exist\n pass #raise\n\n try:\n self.cursor.execute(\n \"\"\"CREATE TABLE sandbox.dvds_rdbhdb_super(\n id SERIAL PRIMARY KEY,\n name varchar(40) NOT NULL,\n rating float,\n UNIQUE(name)\n );\n \"\"\" )\n except db.ProgrammingError, e:\n if e[0] != '42P07':\n raise" ]
[ "0.7663976", "0.75398517", "0.75250465", "0.7471838", "0.7441649", "0.7391628", "0.73528063", "0.72446615", "0.72148156", "0.72110015", "0.71857905", "0.7177211", "0.7132408", "0.7070013", "0.705777", "0.7000869", "0.69667584", "0.69374496", "0.6935844", "0.6935844", "0.6898643", "0.68908525", "0.6869744", "0.68635553", "0.6857782", "0.6838732", "0.68320835", "0.6829788", "0.6827978", "0.6824524", "0.6823407", "0.6818129", "0.68145025", "0.6784097", "0.6784097", "0.67525536", "0.6720108", "0.6714579", "0.6707401", "0.6706508", "0.66997904", "0.6688962", "0.6667446", "0.66668844", "0.6611391", "0.6605641", "0.6600188", "0.6599274", "0.6592336", "0.6573141", "0.65523946", "0.6545265", "0.6530986", "0.6529134", "0.6526031", "0.65256035", "0.65246576", "0.65059555", "0.6500516", "0.6499535", "0.64974165", "0.64939517", "0.64917976", "0.64917976", "0.64917976", "0.64917976", "0.64917976", "0.64917976", "0.64917976", "0.6481409", "0.64767396", "0.6447854", "0.64434", "0.64380157", "0.6421764", "0.64209306", "0.6420793", "0.64126617", "0.6412598", "0.64096093", "0.639809", "0.63972867", "0.63967943", "0.6394704", "0.6390631", "0.6381632", "0.6381236", "0.6379568", "0.6377294", "0.63755417", "0.63668644", "0.63655275", "0.6359738", "0.635786", "0.6356987", "0.6340941", "0.6340632", "0.63378537", "0.633665", "0.63348776" ]
0.8100372
0
Calculate optimal alignment with the Feng-Doolittle algorithm.
def run(self, seq_fasta_fn, subst_matrix_fn, cost_gap_open, clustering):
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def seq_align(string1,string2,mismatch_penalty,gap_penalty):\n\n # define 2x2 matrix\n matrix = []\n for i in range(len(string1)+1):\n if i == 0:\n matrix.append(list([gap_penalty * x for x in range(len(string2)+1)]))\n else:\n matrix.append(list([gap_penalty * i if x == 0 else None for x in range(len(string2)+1)]))\n\n # populate matrix by looping through the strings and finding optimal value for each spot\n for i in range(len(string1)):\n for j in range(len(string2)):\n if string1[i] == string2[j]:\n val1 = 0 + matrix[i][j]\n else:\n val1 = mismatch_penalty + matrix[i][j]\n val2 = gap_penalty + matrix[i][j+1]\n val3 = gap_penalty + matrix[i+1][j]\n min_val = min(val1,val2,val3)\n matrix[i+1][j+1] = min_val\n\n\n # define values to use while retracing\n result_str1 = ''\n result_str2 = ''\n i = len(matrix)-1\n j = len(matrix[0])-1\n\n # trace through matrix to find the optimal character alignment\n while i > 0 and j > 0:\n val1 = matrix[i-1][j-1]\n val2 = matrix[i-1][j]\n val3 = matrix[i][j-1]\n min_val = min(val1,val2,val3)\n if val1 == min_val:\n result_str1 += string1[i-1]\n result_str2 += string2[j-1]\n i -= 1\n j -= 1\n elif val2 == min_val:\n result_str1 += \"-\"\n result_str2 += string2[j-1]\n i -= 1\n else:\n result_str1 += string1[i-1]\n result_str2 += \"-\"\n j -= 1\n\n # for any leftover j values\n if i == 0:\n while j > 0:\n result_str1 += '-'\n result_str2 += string2[j]\n j -=1\n\n # for any leftover i values\n if j == 0:\n while i > 0:\n result_str1 += string1[i]\n result_str2 += \"-\"\n i -= 1\n\n return matrix[len(matrix)-1][len(matrix[0])-1], result_str1[::-1], result_str2[::-1]", "def compute_local_alignment(seq_x, seq_y, scoring_matrix, alignment_matrix):\n align_x = \"\"\n align_y = \"\"\n\n len_x = len(seq_x)\n len_y = len(seq_y)\n\n #score = max([alignment_matrix[row][col] for row in range(len_x + 1) for col in range(len_y+1)])\n\n max_score = -1\n max_positions = []\n for row in range(len(seq_x)+1):\n for col in range(len(seq_y)+1):\n if alignment_matrix[row][col] == max_score:\n max_positions.append((row,col))\n if alignment_matrix[row][col] > max_score:\n max_score = alignment_matrix[row][col]\n max_positions = [(row, col)]\n max_row, max_col = random.choice(max_positions)\n\n #print max_score, max_row, max_col\n\n len_x = max_row\n len_y = max_col\n\n while alignment_matrix[len_x][len_y] > 0:\n #print len_x, len_y\n if alignment_matrix[len_x][len_y] == alignment_matrix[len_x -1][len_y - 1] + scoring_matrix[seq_x[len_x-1]][seq_y[len_y-1]]:\n align_x = seq_x[len_x-1] + align_x\n align_y = seq_y[len_y-1] + align_y\n len_x -= 1\n len_y -= 1\n elif alignment_matrix[len_x][len_y] == alignment_matrix[len_x -1][len_y] + scoring_matrix[seq_x[len_x-1]][\"-\"]:\n align_x = seq_x[len_x-1] + align_x\n align_y = \"-\" + align_y\n len_x -= 1\n else:\n align_x = \"-\" + align_x\n align_y = seq_y[len_y-1] + align_y\n len_y -= 1\n\n #while len_x > 0:\n # align_x = seq_x[len_x-1] + align_x\n # align_y = \"-\" + align_y\n # len_x -= 1\n\n #while len_y > 0:\n # align_x = \"-\" + align_x\n # align_y = seq_y[len_y-1] + align_y\n # len_y -= 1\n\n return (max_score, align_x, align_y)", "def optimal_string_alignment_similarity(s1, s2):\n max_cost = max(len(s1), len(s2))\n\n if max_cost == 0:\n return 1.0\n\n return 1.0 - float(optimal_string_alignment_distance(s1, s2)) / max_cost", "def local_align(x, y, score=ScoreParam(AA, GG, AT, AC, AG, GC, gapPen)):\n\n # create a zero-filled matrix\n A = make_matrix(len(x) + 1, len(y) + 1)\n\n best = 0\n optloc = (0,0)\n\n alignOne = []\n 
alignTwo = []\n\n #trace = []\n\n # fill in A in the right order\n for i in xrange(1, len(x)+1):\n #iterTrace = []\n for j in xrange(1, len(y)+1):\n # the local alignment recurrance rule:\n A[i][j] = max(\n A[i][j-1] + score.gap,\n A[i-1][j] + score.gap,\n A[i-1][j-1] + score.matchchar(x[i-1], y[j-1]),\n #0\n )\n\n #Find traceback\n '''if A[i][j] == A[i][j-1] + score.gap:\n iterTrace.append((i, j-1))\n elif A[i][j] == A[i-1][j] + score.gap:\n iterTrace.append((i-1, j))\n elif A[i][j] == A[i-1][j-1] + score.matchchar(x[i-1], y[j-1]):\n iterTrace.append((i-1, j-1))'''\n\n # track the cell with the largest score\n if A[i][j] >= best:\n best = A[i][j]\n optloc = (i,j)\n alignOne.append(x[i-1])\n alignTwo.append(y[j-1])\n\n #trace.append(iterTrace)\n alOne = ''.join(alignOne)\n alTwo = ''.join(alignTwo)\n\n print \"Scoring:\", str(score)\n print \"A matrix =\"\n #print_matrix(x, y, A)\n print \"Optimal Score =\", best\n print \"Max location in matrix =\", optloc\n #for i in range(0, len(trace)):\n #print(trace[i])\n # return the opt score and the best location\n return best, optloc, alOne, alTwo", "def compute_local_alignment(seq_x,seq_y,scoring_matrix,alignment_matrix):\n #initialization of variables\n x_pos = -1\n y_pos = -1\n result_seq_x = ''\n result_seq_y = ''\n score = 0\n\n #determine start position in alignment_matrix as position with maximum value \n for row in range(len(seq_x) + 1):\n for col in range(len(seq_y) + 1):\n if alignment_matrix[row][col] > score:\n score = alignment_matrix[row][col]\n x_pos = row\n y_pos = col\n\n #start in start position and go upwards till we reach first entry with value 0\n #in every iteration we reconstruct alignments based on value in alignment_matrix and scoring_matrix\n while x_pos != 0 and y_pos !=0:\n current_value = alignment_matrix[x_pos][y_pos]\n if current_value == 0:\n break\n \n if current_value == alignment_matrix[x_pos-1][y_pos-1] + scoring_matrix[seq_x[x_pos-1]][seq_y[y_pos-1]]:\n result_seq_x = seq_x[x_pos-1] + result_seq_x\n result_seq_y = seq_y[y_pos-1] + result_seq_y\n x_pos -= 1\n y_pos -= 1\n elif current_value == alignment_matrix[x_pos-1][y_pos] + scoring_matrix[seq_x[x_pos-1]][\"-\"]:\n result_seq_x = seq_x[x_pos-1] + result_seq_x\n result_seq_y = \"-\" + result_seq_y\n x_pos -= 1\n else: \n result_seq_x = \"-\" + result_seq_x\n result_seq_y = seq_y[y_pos-1] + result_seq_y\n y_pos -= 1\n\n return (score,result_seq_x,result_seq_y)", "def compute_local_alignment(seq_x,seq_y,scoring_matrix,alignment_matrix):\n best_score = 0\n len_m, len_n = len(seq_x), len(seq_y)\n best_i = 0\n best_j = 0\n x_ret, y_ret = '', ''\n for idx_i in range(len_m+1):\n for idx_j in range(len_n+1):\n if alignment_matrix[idx_i][idx_j] > best_score:\n best_score = alignment_matrix[idx_i][idx_j]\n best_i = idx_i\n best_j = idx_j\n idx_i = best_i\n idx_j = best_j\n while idx_i != 0 and idx_j != 0:\n if alignment_matrix[idx_i][idx_j] == 0:\n return (best_score, x_ret, y_ret)\n if alignment_matrix[idx_i][idx_j] == (alignment_matrix[idx_i-1][idx_j-1] +\n scoring_matrix[seq_x[idx_i-1]][seq_y[idx_j-1]]):\n # score from diagnoal cell\n x_ret = (seq_x[idx_i-1]) + x_ret\n y_ret = (seq_y[idx_j-1]) + y_ret\n idx_i -= 1\n idx_j -= 1\n elif alignment_matrix[idx_i][idx_j] == (alignment_matrix[idx_i-1][idx_j] +\n scoring_matrix[seq_x[idx_i-1]]['-']):\n # score from above cell\n x_ret = (seq_x[idx_i - 1]) + x_ret\n y_ret = ('-') + y_ret\n idx_i -= 1\n else:\n # score from left cell\n x_ret = ('-') + x_ret\n y_ret = (seq_y[idx_j - 1]) + y_ret\n idx_j -= 1\n while idx_i != 0:\n 
if alignment_matrix[idx_i][idx_j] == 0:\n return (best_score, x_ret, y_ret)\n\n # idx_j = 0, move upward along first column\n x_ret = (seq_x[idx_i - 1]) + x_ret\n y_ret = ('-') + y_ret\n idx_i -= 1\n while idx_j != 0:\n if alignment_matrix[idx_i][idx_j] == 0:\n return (best_score, x_ret, y_ret)\n\n # idx_i = 0, move left along first row\n x_ret = ('-') + x_ret\n y_ret = (seq_y[idx_j - 1]) + y_ret\n idx_j -= 1\n return (best_score, x_ret, y_ret)", "def main(argv):\n \n ### gets data from csv, sets variables\n seq1, seq2 = get_seqs('../data/seq.csv')\n \n \n # Assign the longer sequence to s1, and the shorter to s2\n l1, l2 = len(seq1), len(seq2)\n if l1 >= l2:\n s1, s2 = ((l2 - 1) * \".\" + seq1 + (l2 - 1) * \".\"), seq2\n #puts l2-1 \".\"s both sides of l1, allows alignment of all overlap combos\n else:\n s1, s2 = ((l1 - 1) * \".\" + seq2 + (l1 - 1) * \".\"), seq1\n l1, l2 = l2, l1 \n\n # writes alignment(s) with highest score into output file\n my_best_score = -1 #so 0 beats best score\n for i in range(l1 + l2 -1):\n score, matched, shift, end_shift = calculate_score(s1, s2, l1, l2, i)\n #assigns returns from calc_score function to these variables\n if score > my_best_score:\n my_best_score = score\n statement = \"This alignment occurs when the smaller strand (\" + \\\n str(l2) + \"nt in length) attaches from base \" + str(i - l2 + 2) + \\\n \" of the larger strand, with the highest score of \" + str(score) + \\\n \":\\n\"\n #statement explaining the alignment in detail\n best_comparison_highSP = (shift + matched + (l2 - 1) * \".\" + \"\\n\")\n best_comparison_lowSP = (shift + matched + end_shift + \"\\n\")\n best_s2, best_s1 = (shift + s2 + end_shift + \"\\n\"), (s1 + \"\\n\\n\\n\")\n #formats the matching, s1 and s2 lines to line-up neatly\n if i < l1 - 1:\n best_alignment = (str(statement) + str(best_comparison_lowSP) \\\n + str(best_s2) + str(best_s1))\n else:\n best_alignment = (str(statement) + str(best_comparison_highSP) \\\n + str(best_s2) + str(best_s1))\n # uses returned variables to write a statement about the alignment \n # giving its score and startpoint, and assigns 3 lines of alignment \n # (s1, s2 and matching bases) to a variable each for later printing\n f = open('../results/seqs_align.txt', 'w')\n f.write(best_alignment)\n f.close()\n print(\"Done!\")\n return None", "def alignment(gram1, gram2):\n # BUG: this loss function causes abnormal optimization behaviors, see\n # comments in past commits\n\n alignment = frobenius_inner_prod(gram1, gram2) /\\\n m.sqrt(frobenius_inner_prod(gram1, gram1) *\n frobenius_inner_prod(gram2, gram2))\n return alignment", "def getAlignment(self):\n # Code to complete - generated by traceback through matrix to generate aligned pairs\n \n # find the position of the max_value\n max_value = self.getMaxAlignmentScore()\n max_pos = tuple(numpy.argwhere(self.matrix == max_value)[-1])\n x_pos = max_pos[0]; y_pos = max_pos[1]\n\n # array that holds the tuples\n path = list()\n\n # now find the path to the 0\n \n while self.matrix[x_pos][y_pos] != 0:\n \n # if diagonal is a match take that as priority\n if self.string1[x_pos - 1] == self.string2[y_pos - 1]:\n path.append((x_pos - 1, y_pos - 1))\n x_pos -=1; y_pos -= 1\n continue\n\n # finds the best horizontal alignment\n bestX = 0; bestY = y_pos - 1\n for i in range(x_pos - 1):\n if self.matrix[i][y_pos - 1] >= self.matrix[bestX][bestY]:\n bestX = i\n \n # finds best vertical alignment\n bestX_vertical = x_pos - 1; bestY_vertical = 0\n for i in range(y_pos - 1):\n if self.matrix[x_pos - 1][i] >= 
self.matrix[bestX_vertical][bestY_vertical]:\n bestY_vertical = i\n \n # if diagonal not satisfied, see which is better\n # the horizontal of vertical alignment.\n if self.matrix[bestX][bestY] < self.matrix[bestX_vertical][bestY_vertical]:\n path.append((bestX_vertical, bestY_vertical))\n x_pos = bestX_vertical; y_pos = bestY_vertical\n else:\n path.append((bestX, bestY))\n x_pos = bestX; y_pos = bestY\n\n return path[::-1] # reversed because we want origin to highest element.", "def question2():\n \n # load sequences and scoring matrix\n score_matrix = read_scoring_matrix(PAM50_URL)\n human_seq = \"HSGVNQLGGVFVNGRPLPDSTRQKIVELAHSGARPCDISRILQVSNGCVSKILGRYYETGSIRPRAIGGSKPRVATPEVVSKIAQYKRECPSIFAWEIRDRLLSEGVCTNDNIPSVSSINRVLRNLASEKQQ\"\n frfly_seq = \"HSGVNQLGGVFVGGRPLPDSTRQKIVELAHSGARPCDISRILQVSNGCVSKILGRYYETGSIRPRAIGGSKPRVATAEVVSKISQYKRECPSIFAWEIRDRLLQENVCTNDNIPSVSSINRVLRNLAAQKEQQ\"\n consensus_pax = read_protein(CONSENSUS_PAX_URL)\n \n # compute human and fruitfly global alignment matrix with consensus pax\n human_align_matrix = student.compute_alignment_matrix(human_seq, consensus_pax, score_matrix, True)\n frfly_align_matrix = student.compute_alignment_matrix(frfly_seq, consensus_pax, score_matrix, True)\n \n # compute human and fruitfly global alignment sequences\n score_human, human_align, consensus_align = student.compute_global_alignment(human_seq, consensus_pax, \n score_matrix, human_align_matrix)\n score_fly, frfly_align, consensus_align_2 = student.compute_global_alignment(frfly_seq, consensus_pax,\n score_matrix, frfly_align_matrix)\n \n # compute percentages match for human and fruitfly\n human_count = 0.0\n for index in range(len(human_align)):\n if human_align[index] == consensus_align[index]:\n human_count += 1\n \n frfly_count = 0.0\n for index in range(len(frfly_align)):\n if frfly_align[index] == consensus_align_2[index]:\n frfly_count += 1\n \n print \"% Human: \" + str(human_count / len(human_align) * 100)\n print \"Hmn: \" + human_align\n print \"PAX: \" + consensus_align\n \n print \"\"\n \n print \"% FrFly: \" + str(frfly_count / len(frfly_align) * 100)\n print \"Fly: \" + frfly_align\n print \"PAX: \" + consensus_align_2", "def local_aligner_score(s1, s2, gap_penalty=-1, gap_opening_penalty=-10, edit_function=utils.sub_matrices_distance, matrix=MatrixInfo.pam120):\n\n n_row = len(s1) + 1\n n_col = len(s2) + 1\n # Creates a matrix where the partial scores are stored.\n S = np.zeros((n_row, n_col))\n # Creates a matrix (stored as DataFrame) where the optimal movements are\n # stored.\n backtrack_matrix = pd.DataFrame(\"\", index=np.arange(n_row), columns=np.arange(n_col))\n\n # Initialize the first column and row of the matrices.\n # In the local aligner, we stop when a 0 is encountered, which corresponds to an \"X\"\n for i in range(n_row):\n backtrack_matrix.set_value(i, 0, \"X\")\n\n for j in range(n_col):\n backtrack_matrix.set_value(0, j, \"X\")\n \n # small optimization: keep track of the maximum score encountered so far, and its indices.\n score_max = 0\n i_max = 0\n j_max = 0\n \n for i in range(1, n_row):\n for j in range(1, n_col):\n # Compute the possible movements, and then keeps the best.\n s1_gap = max([S[i - k, j] + utils.gap_function(gap_penalty, gap_opening_penalty, k) for k in range(1, i+1)])\n s2_gap = max([S[i, j - k] + utils.gap_function(gap_penalty, gap_opening_penalty, k) for k in range(1, j+1)])\n mut = S[i - 1, j - 1] + edit_function(s1[i - 1], s2[j - 1], matrix=matrix)\n # In the local aligner, don't accept negative scores!\n S[i, j] = 
max(s1_gap, s2_gap, mut, 0)\n\n if S[i, j] >= score_max:\n score_max = S[i, j]\n i_max = i\n j_max = j\n # Write in the matrix the movement that lead to that cell, as a string.\n # e.g. \"HV\" means that horizontal and vertical movements were the\n # best.\n # In local alignment, \"X\" means that 0 was the maximum value, and all the movements gave a negative score.\n # The backtracking will stop when an \"X\" is encountered.\n backtrack_matrix.set_value(i, j, \"\".join(check_argmax([s1_gap, s2_gap, mut, 0])))\n \n return [score_max, S, backtrack_matrix, i_max, j_max]", "def compute_global_alignment(seq_x,seq_y,scoring_matrix,alignment_matrix):\n #initialization of start position as bottom-right corner of matrix\n x_pos = len(seq_x)\n y_pos = len(seq_y)\n\n #initialization of variables\n result_seq_x = ''\n result_seq_y = ''\n score = alignment_matrix[x_pos][y_pos]\n\n #start in bottom right corner of matrix and go upwards till we reach left or upper edge\n #in every iteration we reconstruct alignments based on value in alignment_matrix and scoring_matrix\n while x_pos != 0 or y_pos !=0:\n current_value = alignment_matrix[x_pos][y_pos]\n \n if current_value == alignment_matrix[x_pos-1][y_pos-1] + scoring_matrix[seq_x[x_pos-1]][seq_y[y_pos-1]] and x_pos > 0 and y_pos > 0:\n result_seq_x = seq_x[x_pos-1] + result_seq_x\n result_seq_y = seq_y[y_pos-1] + result_seq_y\n x_pos -= 1\n y_pos -= 1\n elif current_value == alignment_matrix[x_pos-1][y_pos] + scoring_matrix[seq_x[x_pos-1]][\"-\"]:\n result_seq_x = seq_x[x_pos-1] + result_seq_x\n result_seq_y = \"-\" + result_seq_y\n x_pos -= 1\n else: \n result_seq_x = \"-\" + result_seq_x\n result_seq_y = seq_y[y_pos-1] + result_seq_y\n y_pos -= 1\n\n return (score,result_seq_x,result_seq_y)", "def needwu(A,B,S,d):\n AlignementA=\"\"\n AlignementB=\"\"\n F=matriF(A,B,S,d)\n i=len(A)-1\n j=len(B)-1\n\n while i>0 and j>0:\n score=F[i][j]\n scorediag=F[i-1][j-1]\n scoreup=F[i][j-1]\n scoreleft=F[i-1][j]\n\n if score==(scorediag+S[ind(A[i])][ind(B[j])]):\n AlignementA=A[i]+AlignementA\n AlignementB=B[j]+AlignementB\n i=i-1\n j=j-1\n\n elif score==(scoreleft+d):\n AlignementA=A[i]+AlignementA\n AlignementB=\"-\"+AlignementB\n i=i-1\n\n elif score==(scoreup+d):\n AlignementA=\"-\"+AlignementA\n AlignementB=B[j]+AlignementB\n j=j-1\n\n while i>0:\n AlignementA=A[i]+AlignementA\n AlignementB=\"-\"+AlignementB\n i=i-1\n\n while j>0:\n AlignementA=\"-\"+AlignementA\n AlignementB=B[j]+AlignementB\n j=j-1\n\n return AlignementA, AlignementB", "def find_best_align(s1, s2, l1, l2):\n\n my_best_align = None\n my_best_score = -1\n\n for i in range(l1): # Note that you just take the last alignment with the highest score\n z = calculate_score(s1, s2, l1, l2, i)\n if z > my_best_score:\n my_best_align = \".\" * i + s2 # prints number of '.' 
to get to startpoint (which is i here)\n my_best_score = z\n\n # Formatted output\n print(my_best_align)\n print(s1)\n print(\"Best score:\", my_best_score)\n\n return my_best_align, my_best_score", "def local_align(x, y, gap, match, mismatch):\n # create a zero-filled matrix\n A = make_matrix(len(x) + 1, len(y) + 1)\n # make a copy of A to keep the path\n path = make_matrix(len(x) + 1, len(y) + 1)\n print(len(A[0]))\n print(len(A))\n # print(A[12][11])\n best = 0\n optloc = (0, 0)\n # fill in A in the right order\n for i in range(1, len(y)):\n for j in range(1, len(x)):\n print(\"Test\")\n # get the values of the neighbouring cells\n left = A[i][j - 1] + gap\n up = A[i - 1][j] + gap\n diagonally = A[i - 1][j - 1] + (match if x[i] == y[j] else mismatch)\n\n maxCell = max(left, up, diagonally, 0)\n\n # the local alignment recurrance rule:\n A[i][j] = maxCell\n\n # track the cell with the largest score\n if A[i][j] >= best:\n best = A[i][j]\n optloc = (i, j)\n\n # track the path in a matrix\n # 0 is left\n # 1 is up\n # 2 is diagonally\n # 3 is zero value\n if left == maxCell:\n path[i][j] = 0\n elif up == maxCell:\n path[i][j] = 1\n elif diagonally == maxCell:\n path[i][j] = 2\n else:\n path[i][j] = 3\n\n # track where we got\n # return the opt score and the best location\n return best, optloc, path, A", "def compute_global_alignment(seq_x, seq_y, scoring_matrix, alignment_matrix):\n num_rows = len(seq_x)\n num_cols = len(seq_y)\n x_prime = ''\n y_prime = ''\n\n while num_rows != 0 and num_cols != 0:\n if alignment_matrix[num_rows][num_cols] == alignment_matrix[num_rows-1][num_cols-1] + scoring_matrix[seq_x[num_rows-1]][seq_y[num_cols-1]]:\n x_prime = seq_x[num_rows-1] + x_prime\n y_prime = seq_y[num_cols-1] + y_prime\n num_rows -= 1\n num_cols -= 1\n else:\n if alignment_matrix[num_rows][num_cols] == alignment_matrix[num_rows-1][num_cols] + scoring_matrix[seq_x[num_rows-1]]['-']:\n x_prime = seq_x[num_rows-1] + x_prime\n y_prime = '-' + y_prime\n num_rows -= 1\n else:\n x_prime = '-' + x_prime\n y_prime = seq_y[num_cols-1] + y_prime\n num_cols -= 1\n \n while num_rows != 0:\n x_prime = seq_x[num_rows-1] + x_prime\n y_prime = '-' + y_prime\n num_rows -= 1\n\n while num_cols != 0:\n x_prime = '-' + x_prime\n y_prime = seq_y[num_cols-1] + y_prime\n num_cols -= 1\n\n # compute score of alignment\n score = 0\n for position in range(len(x_prime)):\n score += scoring_matrix[x_prime[position]][y_prime[position]]\n\n return (score, x_prime, y_prime)", "def question7(seq_x, seq_y):\n \n diag_score = 2\n off_diag_score = 1\n dash_score = 0\n alphabet = \"abcdefghijklmnopqrstuvwxyz\"\n score_matrix = student.build_scoring_matrix(alphabet, diag_score, off_diag_score, dash_score)\n \n align_matrix = student.compute_alignment_matrix(seq_x, seq_y, score_matrix, True)\n score, align_x, align_y = student.compute_global_alignment(seq_x, seq_y, score_matrix, align_matrix)\n \n edit_distance = len(seq_x) + len(seq_y) - score\n \n print \"Edit distance: \" + str(edit_distance)\n print align_x\n print align_y", "def compute_global_alignment(seq_x, seq_y, scoring_matrix, alignment_matrix):\n\n align_x = \"\"\n align_y = \"\"\n\n len_x = len(seq_x)\n len_y = len(seq_y)\n\n score = alignment_matrix[len_x][len_y]\n\n while len_x > 0 and len_y > 0:\n if alignment_matrix[len_x][len_y] == alignment_matrix[len_x -1][len_y - 1] + scoring_matrix[seq_x[len_x-1]][seq_y[len_y-1]]:\n align_x = seq_x[len_x-1] + align_x\n align_y = seq_y[len_y-1] + align_y\n len_x -= 1\n len_y -= 1\n elif alignment_matrix[len_x][len_y] == 
alignment_matrix[len_x -1][len_y] + scoring_matrix[seq_x[len_x-1]][\"-\"]:\n align_x = seq_x[len_x-1] + align_x\n align_y = \"-\" + align_y\n len_x -= 1\n else:\n align_x = \"-\" + align_x\n align_y = seq_y[len_y-1] + align_y\n len_y -= 1\n\n while len_x > 0:\n align_x = seq_x[len_x-1] + align_x\n align_y = \"-\" + align_y\n len_x -= 1\n\n while len_y > 0:\n align_x = \"-\" + align_x\n align_y = seq_y[len_y-1] + align_y\n len_y -= 1\n\n return (score, align_x, align_y)", "def compute_alignment_matrix(seq_x,seq_y,scoring_matrix,global_flag):\n \n rows = len(seq_x)\n cols = len(seq_y)\n #if sequences are empty return [[0]]\n if rows == 0 and cols == 0:\n return [[0]]\n \n #initialize of alignment matrix and other variables\n alignment_matrix = [[ 0 for col in range(cols+1)] for row in range(rows+1)]\n value = 0\n \n for row in range(rows+1):\n for col in range(cols+1):\n #for every entry its value is computed \n if row == 0 and col == 0:\n #entry [0,0]\n alignment_matrix[row][col] = 0\n elif row == 0:\n #entry [0,j] is computed based on values [0,j-1] and score of (\"-\" and seq_y[j]) \n value = alignment_matrix[row][col-1] + scoring_matrix[\"-\"][seq_y[col-1]]\n elif col == 0:\n #entry [i,0] is computed based on values [i-1,0] and score of (seq_x[i] and \"-\")\n value = alignment_matrix[row-1][col] + scoring_matrix[seq_x[row-1]][\"-\"]\n else:\n #entry [i,j] is computed based of [i-1,j-1],[i,j-1],[i-1,j] as maximum of values\n val1 = alignment_matrix[row-1][col-1] + scoring_matrix[seq_x[row-1]][seq_y[col-1]]\n val2 = alignment_matrix[row-1][col] + scoring_matrix[seq_x[row-1]][\"-\"]\n val3 = alignment_matrix[row][col-1] + scoring_matrix[\"-\"][seq_y[col-1]]\n\n value = max(val1,val2,val3)\n \n if not global_flag:\n #for local alignment negative score is replaced with 0\n value = max(value,0)\n \n alignment_matrix[row][col] = value \n\n return alignment_matrix", "def optimal_string_alignment_distance(s1, s2):\n\n utils.check_for_none(s1, s2)\n utils.check_for_type(str, s1, s2)\n\n # s1 = utils.unicode_normalize(s1)\n # s2 = utils.unicode_normalize(s2)\n\n n1, n2 = len(s1), len(s2)\n\n dp = [[0] * (n2 + 1) for _ in range(n1 + 1)]\n\n for i in range(0, n1 + 1):\n dp[i][0] = i\n for j in range(0, n2 + 1):\n dp[0][j] = j\n\n for i in range(1, n1 + 1):\n for j in range(1, n2 + 1):\n cost = 0 if s1[i - 1] == s2[j - 1] else 1\n\n dp[i][j] = min(dp[i][j - 1] + 1,\n dp[i - 1][j] + 1,\n dp[i - 1][j - 1] + cost)\n\n if i > 1 and j > 1 and s1[i - 1] == s2[j - 2] and s1[i - 2] == s2[j - 1]:\n dp[i][j] = min(dp[i][j], dp[i - 2][j - 2] + cost)\n\n return dp[n1][n2]", "def global_alignment(first_seq, second_seq, match_penalty_value, mismatch_penalty_value, gap_penalty_value):\n alignment_matrix = initiate_matrix(first_seq, second_seq, gap_penalty_value)\n path_matrix = np.zeros((alignment_matrix.shape[0], alignment_matrix.shape[1], 3), dtype=str)\n scores = []\n \"\"\" Second step is to apply get the max score method, then assign it to the current cell. 
\"\"\"\n for i in range(1, alignment_matrix.shape[0]):\n for j in range(1, alignment_matrix.shape[1]):\n row_score = alignment_matrix[i, j - 1] + gap_penalty_value\n column_score = alignment_matrix[i - 1, j] + gap_penalty_value\n if second_seq[i - 1] == first_seq[j - 1]:\n diagonal_score = alignment_matrix[i - 1, j - 1] + match_penalty_value\n else:\n diagonal_score = alignment_matrix[i - 1, j - 1] + mismatch_penalty_value\n scores.append(row_score)\n scores.append(column_score)\n scores.append(diagonal_score)\n alignment_matrix[i, j], ex_cell = get_max_score(scores)\n scores.clear()\n for I in range(ex_cell.size):\n if ex_cell[0][I] == 0:\n path_matrix[i, j, 1] = \"S\"\n elif ex_cell[0][I] == 1:\n path_matrix[i, j, 1] = \"F\"\n elif ex_cell[0][I] == 2:\n path_matrix[i, j, 1] = \"D\"\n\n max_score = alignment_matrix[i, j]\n \"\"\" Third step is to trace back.\"\"\"\n f, s = trace_back(path_matrix, first_seq, second_seq, match_penalty_value, mismatch_penalty_value,\n gap_penalty_value)\n \"\"\"Last step is to check the max score with the aligned sequences score. \"\"\"\n new_s, check = check_alignment_score(f, s, max_score, match_penalty_value, mismatch_penalty_value,\n gap_penalty_value)\n return f, s, new_s, check", "def sequence_align(string_v, string_w):\n m = len(string_v)\n n = len(string_w)\n\n # Initialization; D[i][j][0] contains the max alignment score of the\n # ith prefix of v and the jth of w; D[i][j][1] contains the back pointer.\n D = [[(0, START) for _ in range(n + 1)] for _ in range(m + 1)]\n\n for i in range(1, m + 1):\n D[i][0] = (D[i - 1][0][0] + blosum['-', string_v[i - 1]], DELETE)\n\n for j in range(1, n + 1):\n D[0][j] = (D[0][j - 1][0] + blosum['-', string_w[j - 1]], INSERT)\n\n # Recurrence\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n insert = D[i][j-1][0] + blosum['-', string_w[j - 1]]\n delete = D[i-1][j][0] + blosum[string_v[i - 1], '-']\n substitute = D[i-1][j-1][0] + blosum[string_v[i - 1], string_w[j - 1]]\n # Set D[i][j] to the max of the recurrences\n if insert > delete and insert > substitute:\n D[i][j] = (insert, INSERT)\n elif delete > substitute:\n D[i][j] = (delete, DELETE)\n else:\n D[i][j] = (substitute, SUBSTITUTE)\n\n i, j = m, n\n v_aligned = ''\n w_aligned = ''\n back_pointer = D[i][j][1]\n while back_pointer != START:\n if back_pointer == INSERT:\n j -= 1\n v_aligned = '-' + v_aligned\n w_aligned = string_w[j] + w_aligned\n\n \n elif back_pointer == DELETE:\n i -= 1\n v_aligned = string_v[i] + v_aligned\n w_aligned = '-' + w_aligned\n\n elif back_pointer == SUBSTITUTE:\n i -= 1\n j -= 1\n v_aligned = string_v[i] + v_aligned\n w_aligned = string_w[j] + w_aligned\n\n \n back_pointer = D[i][j][1]\n \n return v_aligned, w_aligned", "def cost(solution):\n cost = 0\n alm_count = 0 # alignment operand/operator count\n fst_len = len(solution[0])\n snd_len = len(solution[1])\n min_len = min(fst_len, snd_len)\n\n for i in range(min_len):\n if solution[0][i] == solution[1][i]:\n alm_count += 1\n\n max_spaces = max(solution[0].count(\" \"), solution[1].count(\" \"))\n cost = ((alm_count * 2.0) + max_spaces) / 3.0\n\n return cost", "def compute_alignment_matrix(seq_x,seq_y,scoring_matrix,global_flag):\n m_len, n_len = len(seq_x), len(seq_y)\n s_alignment_matrix = [[0 for _ in range(n_len+1)] for _ in range(m_len+1)]\n # print s_alignment_matrix\n for i_idx in range(1, m_len+1):\n last_score = s_alignment_matrix[i_idx-1][0] + scoring_matrix[seq_x[i_idx-1]]['-']\n s_alignment_matrix[i_idx][0] = \\\n (global_flag) and last_score or max(0, 
last_score)\n\n for j_idx in range(1,n_len+1):\n last_score = s_alignment_matrix[0][j_idx-1] + scoring_matrix['-'][seq_y[j_idx-1]]\n s_alignment_matrix[0][j_idx] = (global_flag) and last_score or max(0, last_score)\n\n for i_idx in range(1, m_len+1):\n for j_idx in range(1, n_len+1):\n diag_score = s_alignment_matrix[i_idx-1][j_idx-1] + scoring_matrix[seq_x[i_idx-1]][seq_y[j_idx-1]]\n up_score = s_alignment_matrix[i_idx-1][j_idx] + scoring_matrix[seq_x[i_idx-1]]['-']\n left_score = s_alignment_matrix[i_idx][j_idx-1] + scoring_matrix['-'][seq_y[j_idx-1]]\n max_score = max(diag_score,up_score,left_score)\n s_alignment_matrix[i_idx][j_idx] = (global_flag) and max_score or max(0, max_score)\n return s_alignment_matrix", "def prob_t_a_given_s(self, alignment_info):\n probability = 1.0\n MIN_PROB = IBMModel.MIN_PROB\n slots = Slots(len(alignment_info.trg_sentence) - 1)\n\n def null_generation_term():\n # Binomial distribution: B(m - null_fertility, p1)\n value = 1.0\n p1 = self.p1\n p0 = 1 - p1\n null_fertility = alignment_info.fertility_of_i(0)\n m = len(alignment_info.trg_sentence) - 1\n value *= pow(p1, null_fertility) * pow(p0, m - 2 * null_fertility)\n if value < MIN_PROB:\n return MIN_PROB\n\n # Combination: (m - null_fertility) choose null_fertility\n for i in range(1, null_fertility + 1):\n value *= (m - null_fertility - i + 1) / i\n return value\n\n def fertility_term():\n value = 1.0\n src_sentence = alignment_info.src_sentence\n for i in range(1, len(src_sentence)):\n fertility = alignment_info.fertility_of_i(i)\n value *= (\n factorial(fertility)\n * self.fertility_table[fertility][src_sentence[i]]\n )\n if value < MIN_PROB:\n return MIN_PROB\n return value\n\n def lexical_translation_term(j):\n t = alignment_info.trg_sentence[j]\n i = alignment_info.alignment[j]\n s = alignment_info.src_sentence[i]\n return self.translation_table[t][s]\n\n def vacancy_term(i):\n value = 1.0\n tablet = alignment_info.cepts[i]\n tablet_length = len(tablet)\n total_vacancies = slots.vacancies_at(len(slots))\n\n # case 1: NULL-aligned words\n if tablet_length == 0:\n return value\n\n # case 2: head word\n j = tablet[0]\n previous_cept = alignment_info.previous_cept(j)\n previous_center = alignment_info.center_of_cept(previous_cept)\n dv = slots.vacancies_at(j) - slots.vacancies_at(previous_center)\n max_v = total_vacancies - tablet_length + 1\n trg_class = self.trg_classes[alignment_info.trg_sentence[j]]\n value *= self.head_vacancy_table[dv][max_v][trg_class]\n slots.occupy(j) # mark position as occupied\n total_vacancies -= 1\n if value < MIN_PROB:\n return MIN_PROB\n\n # case 3: non-head words\n for k in range(1, tablet_length):\n previous_position = tablet[k - 1]\n previous_vacancies = slots.vacancies_at(previous_position)\n j = tablet[k]\n dv = slots.vacancies_at(j) - previous_vacancies\n max_v = total_vacancies - tablet_length + k + 1 - previous_vacancies\n trg_class = self.trg_classes[alignment_info.trg_sentence[j]]\n value *= self.non_head_vacancy_table[dv][max_v][trg_class]\n slots.occupy(j) # mark position as occupied\n total_vacancies -= 1\n if value < MIN_PROB:\n return MIN_PROB\n\n return value\n\n # end nested functions\n\n # Abort computation whenever probability falls below MIN_PROB at\n # any point, since MIN_PROB can be considered as zero\n probability *= null_generation_term()\n if probability < MIN_PROB:\n return MIN_PROB\n\n probability *= fertility_term()\n if probability < MIN_PROB:\n return MIN_PROB\n\n for j in range(1, len(alignment_info.trg_sentence)):\n probability *= 
lexical_translation_term(j)\n if probability < MIN_PROB:\n return MIN_PROB\n\n for i in range(1, len(alignment_info.src_sentence)):\n probability *= vacancy_term(i)\n if probability < MIN_PROB:\n return MIN_PROB\n\n return probability", "def prob_t_a_given_s(self, alignment_info):\n ...", "def local_aligner(s1, s2, gap_penalty=-1, gap_opening_penalty=-10, k=1, sub_alignments_num=1, edit_function=utils.sub_matrices_distance, matrix=MatrixInfo.pam120):\n\n alignments = []\n \n # Build the initial score matrix.\n [score, S, backtrack_matrix, i_max, j_max] = local_aligner_score(s1, s2, gap_penalty=gap_penalty, gap_opening_penalty=gap_opening_penalty, edit_function=edit_function, matrix=matrix)\n for n in range(sub_alignments_num):\n align_list_n = gb2.backtrack_sequence_rec(s1[:i_max], s2[:j_max], backtrack_matrix.iloc[:i_max+1, :j_max+1], k=k)\n \n # Add the alignment scores to each alignment\n for align_i in align_list_n:\n align_i.score = score\n # Add the alignments to the overall list of alignments\n alignments += align_list_n\n \n # Update the score matrix to get more subalignments.\n # Small optimization: done only if sub_alignments_num > 1\n if sub_alignments_num > 1:\n # Update the score matrix to get more subalignments.\n # Get the coordinates of one best matching\n coordinate_list = reconstruct_sequence(s1, s2, S, backtrack_matrix.iloc[:i_max+1, :j_max+1], gap_penalty, gap_opening_penalty, edit_function=edit_function, matrix=matrix)\n update_score_matrix(s1, s2, S, coordinate_list, backtrack_matrix, gap_penalty, gap_opening_penalty, edit_function=edit_function, matrix=matrix)\n\n # Find the new maximum value in the matrix.\n [i_max, j_max] = np.unravel_index(np.argmax(S), S.shape)\n score = S[i_max, j_max]\n if i_max == 0 and j_max == 0:\n break\n \n return alignments", "def test_align_sanity(self):\n # QWERTY resemblance matrix:\n R = qwerty_distance()\n diff, u, r = min_difference_align(\"polynomial\", \"exponential\", R)\n # Warning: we may (read: 'will') use another matrix!\n self.assertEqual(diff, 15)\n # Warning: there may be other optimal matchings!\n self.assertEqual(u, '--polyn-om-ial')\n self.assertEqual(r, 'exp-o-ne-ntial')", "def test_align():\n target = ('TAAATAAATATCTGGTGTTTGAGGCAAAAAGGCAGACTTAAATTCTAAATCACACCTGTGCTT'\n 'CCAGCACTACCTTCAAGCGCAGGTTCGAGCCAGTCAGGCAGGGTACATAAGAGTCCATTGTGC'\n 'CTGTATTATTTTGAGCAATGGCTAAAGTACCTTCACCCTTGCTCACTGCTCCCCCACTTCCTC'\n 'AAGTCTCATCGTGTTTTTTTTAGAGCTAGTTTCTTAGTCTCATTAGGCTTCAGTCACCAT')\n query = ('TCTGGTGTTTGAGGCAAAAAGGCAGACTTAAATTCTAAATCACACCTGTGCTTCCAGCACTACC'\n 'TTCAAGCGCAGGTTCGAGCCAGTCAGGACTGCTCCCCCACTTCCTCAAGTCTCATCGTGTTTTT'\n 'TTTAGAGCTAGTTTCTTAGTCTCATTAGGCTTCAGTCACCATCATTTCTTATAGGAATACCA')\n assert kevlar.align(target, query) == ('10D91M69D79M20I', 155)", "def simpleMap(targetString, minimizerIndex, queryString, config):\n bestAlignment = [None]\n \n def mapForwards(queryString):\n \"\"\" Maps the query string forwards\n \"\"\"\n # Find seed matches, aka \"aligned kmers\"\n seeds = list(minimizerIndex.getMatches(queryString))\n \n # For each cluster of seeds\n for seedCluster in SeedCluster.clusterSeeds(list(seeds), l=config.l):\n \n # Get substring of query and target to align\n queryStringStart = max(0, seedCluster.minX - config.c) # Inclusive coordinate\n queryStringEnd = min(len(queryString), seedCluster.maxX + config.k + config.c) # Exclusive coordinate\n querySubstring = queryString[queryStringStart:queryStringEnd]\n \n targetStringStart = max(0, seedCluster.minY - config.c) # Inclusive coordinate\n targetStringEnd = 
min(len(targetString), seedCluster.maxY + config.k + config.c) # Exclusive coordinate\n targetSubstring = targetString[targetStringStart:targetStringEnd]\n \n print( \"target_aligning\", targetStringStart, targetStringEnd, targetSubstring )\n print( \"query_aligning\", queryStringStart, queryStringEnd, querySubstring )\n \n # Align the genome and read substring\n alignment = SmithWaterman(targetSubstring, querySubstring, \n gapScore=config.gapScore, \n matchScore=config.matchScore,\n mismatchScore=config.mismatchScore)\n \n # Update best alignment if needed\n if bestAlignment[0] == None or alignment.getMaxAlignmentScore() > bestAlignment[0].getMaxAlignmentScore():\n bestAlignment[0] = alignment\n \n return bestAlignment\n \n def reverseComplement(string):\n \"\"\"Computes the reverse complement of a string\n \"\"\"\n rMap = { \"A\":\"T\", \"T\":\"A\", \"C\":\"G\", \"G\":\"C\", \"N\":\"N\"}\n return \"\".join(rMap[i] for i in string[::-1])\n \n # Run mapping forwards and reverse\n mapForwards(queryString)\n mapForwards(reverseComplement(queryString))\n \n return bestAlignment[0]", "def do_semiglobal_alignment(sequences, matrix, penalty):\n seq1 = '-' + sequences[0].Sequence\n seq2 = '-' + sequences[1].Sequence\n\n # scoring matrix initializer\n scoring = local_setup(len(seq1), len(seq2))\n\n # fill scoring matrix\n aa_start = ord('A')\n for i in range(1, len(seq1)):\n aa_x = seq1[i]\n for j in range(1, len(seq2)):\n aa_y = seq2[j]\n xgap = scoring[i][j-1] - penalty\n ygap = scoring[i-1][j] - penalty\n match = scoring[i-1][j-1] + \\\n matrix[ord(aa_x) - aa_start][ord(aa_y) - aa_start]\n\n # store the max score\n scoring[i].append(max([xgap, ygap, match]))\n\n # find the max score (only the last max score)\n max_i, max_j, max_score = 0, 0, -float('inf')\n for j in range(len(scoring[-1])): # find max low road\n if scoring[-1][j] >= max_score:\n max_i, max_j, max_score = -1, j, scoring[-1][j]\n\n for i in range(len(scoring)): # find max high road (priority)\n if scoring[i][-1] >= max_score:\n max_i, max_j, max_score = i, -1, scoring[i][-1]\n\n # perform traceback\n alignment = traceback(\n scoring, seq1, seq2, penalty, matrix, max_i, max_j, semi=True\n )\n\n # add the endgaps for seq1\n if max_i == -1 and max_j != len(scoring[-1]):\n for j in range(max_j + 1, len(scoring[-1])):\n alignment[0][0] += '-'\n alignment[1][0] += ' '\n alignment[2][0] += seq2[j]\n\n # add the endgaps for seq2\n if max_j == -1 and max_i != len(scoring):\n for i in range(max_i + 1, len(scoring)):\n alignment[0][0] += seq1[i]\n alignment[1][0] += ' '\n alignment[2][0] += '-'\n\n # Add the sequences to the scoring matrix for visualizing\n scoring = add_sequences_to_scoring(scoring, seq1, seq2)\n\n return alignment, scoring", "def spectrum_alignment(self):\n self.diff_PROTEIN()\n \n score = [] #node->>([t][i][j])\n for t in range(self.post_modif+1):\n pos = 0 # position of peptide for converting mass\n score_ij = {0: [ float('-inf') for t in range(len(self.vector))]}\n for amino in self.peptide:\n score_j = [ float('-inf') for t in range(len(self.vector))]\n pos += PROTEIN_MASS[amino]\n score_ij[pos] = score_j\n score.append(score_ij)\n \n score[0][0][0] = 0\n # score for node(i,j,t)\n for t in range(self.post_modif+1):\n for i in sorted(score[t]):\n if i > 0: # i-self.diff[i]\n for j in range(len(self.vector)):\n temp_max = float('-inf')\n if j >= self.diff[i]:\n temp_max = score[t][i-self.diff[i]][j-self.diff[i]]\n if t > 0:\n for j_p in range(j):\n if temp_max < score[t-1][i-self.diff[i]][j_p]:\n temp_max = 
score[t-1][i-self.diff[i]][j_p]\n \n score[t][i][j] = self.vector[j] + temp_max\n \n # trace back --> the longest path\n max_score = float('-inf')\n layer = 0 # modify\n row = pos # mass\n column = len(self.vector)-1 # vector\n modify = []\n for t in range(self.post_modif+1):\n if max_score < score[t][pos][-1] :\n max_score = score[t][pos][-1]\n layer = t\n \n while layer > 0:\n score_temp = score[layer][row][column] - self.vector[column]\n if score_temp == score[layer][row-self.diff[row]][column-self.diff[row]]:\n column -= self.diff[row]\n row -= self.diff[row]\n else:\n for j_p in range(column-1):\n if score_temp == score[layer-1][row-self.diff[row]][j_p]:\n modify.append((row, column-row))\n row -= self.diff[row]\n column = j_p\n layer -= 1\n break\n \n\n # print out the sequence\n modify.sort()\n sequence = \"\"\n pos = 0\n i = 0\n mass = 0\n for amino in self.peptide:\n pos += PROTEIN_MASS[amino]\n sequence += str(amino)\n if pos == modify[i][0]:\n if i == 0:\n mass = modify[i][1]\n else:\n mass = modify[i][1]-modify[i-1][1]\n \n if mass > 0:\n sequence += \"(+\"+str(mass)+\")\"\n else:\n sequence += \"(\"+str(mass)+\")\"\n i += 1\n \n print sequence", "def mapForwards(queryString):\n # Find seed matches, aka \"aligned kmers\"\n seeds = list(minimizerIndex.getMatches(queryString))\n \n # For each cluster of seeds\n for seedCluster in SeedCluster.clusterSeeds(list(seeds), l=config.l):\n \n # Get substring of query and target to align\n queryStringStart = max(0, seedCluster.minX - config.c) # Inclusive coordinate\n queryStringEnd = min(len(queryString), seedCluster.maxX + config.k + config.c) # Exclusive coordinate\n querySubstring = queryString[queryStringStart:queryStringEnd]\n \n targetStringStart = max(0, seedCluster.minY - config.c) # Inclusive coordinate\n targetStringEnd = min(len(targetString), seedCluster.maxY + config.k + config.c) # Exclusive coordinate\n targetSubstring = targetString[targetStringStart:targetStringEnd]\n \n print( \"target_aligning\", targetStringStart, targetStringEnd, targetSubstring )\n print( \"query_aligning\", queryStringStart, queryStringEnd, querySubstring )\n \n # Align the genome and read substring\n alignment = SmithWaterman(targetSubstring, querySubstring, \n gapScore=config.gapScore, \n matchScore=config.matchScore,\n mismatchScore=config.mismatchScore)\n \n # Update best alignment if needed\n if bestAlignment[0] == None or alignment.getMaxAlignmentScore() > bestAlignment[0].getMaxAlignmentScore():\n bestAlignment[0] = alignment\n \n return bestAlignment", "def greedy_alignment(embed1, embed2, top_k, nums_threads, metric, normalize, csls_k, accurate):\n t = time.time()\n sim_mat = sim(embed1, embed2, metric=metric, normalize=normalize, csls_k=csls_k)\n num = sim_mat.shape[0]\n if nums_threads > 1:\n hits = [0] * len(top_k)\n mr, mrr = 0, 0\n alignment_rest = set()\n rests = list()\n search_tasks = task_divide(np.array(range(num)), nums_threads)\n pool = multiprocessing.Pool(processes=len(search_tasks))\n for task in search_tasks:\n mat = sim_mat[task, :]\n rests.append(pool.apply_async(calculate_rank, (task, mat, top_k, accurate, num)))\n pool.close()\n pool.join()\n for rest in rests:\n sub_mr, sub_mrr, sub_hits, sub_hits1_rest = rest.get()\n mr += sub_mr\n mrr += sub_mrr\n hits += np.array(sub_hits)\n alignment_rest |= sub_hits1_rest\n else:\n mr, mrr, hits, alignment_rest = calculate_rank(list(range(num)), sim_mat, top_k, accurate, num)\n assert len(alignment_rest) == num\n hits = np.array(hits) / num * 100\n for i in range(len(hits)):\n 
hits[i] = round(hits[i], 3)\n cost = time.time() - t\n if accurate:\n if csls_k > 0:\n print(\"accurate results with csls: csls={}, hits@{} = {}%, mr = {:.3f}, mrr = {:.6f}, time = {:.3f} s \".\n format(csls_k, top_k, hits, mr, mrr, cost))\n else:\n print(\"accurate results: hits@{} = {}%, mr = {:.3f}, mrr = {:.6f}, time = {:.3f} s \".\n format(top_k, hits, mr, mrr, cost))\n else:\n if csls_k > 0:\n print(\"quick results with csls: csls={}, hits@{} = {}%, time = {:.3f} s \".format(csls_k, top_k, hits, cost))\n else:\n print(\"quick results: hits@{} = {}%, time = {:.3f} s \".format(top_k, hits, cost))\n hits1 = hits[0]\n del sim_mat\n gc.collect()\n return alignment_rest, hits1, mr, mrr", "def compute_alignment_matrix(seq_x, seq_y, scoring_matrix, global_flag):\n len_x = len(seq_x)\n len_y = len(seq_y)\n\n alignment_matrix = [[0 for col in range(len_y + 1)] for row in range(len_x + 1)]\n\n for row in range(1, len_x + 1):\n possible_score = alignment_matrix[row-1][0] + scoring_matrix[\"-\"][seq_x[row-1]]\n if global_flag:\n alignment_matrix[row][0] = possible_score\n else:\n alignment_matrix[row][0] = max(0, possible_score)\n\n for col in range(1, len_y+1):\n possible_score = alignment_matrix[0][col-1] + scoring_matrix[\"-\"][seq_y[col-1]]\n if global_flag:\n alignment_matrix[0][col] = possible_score\n else:\n alignment_matrix[0][col] = max(0, possible_score)\n\n for row in range(1, len_x+1):\n for col in range(1, len_y+1):\n route1 = alignment_matrix[row-1][col-1] + scoring_matrix[seq_x[row-1]][seq_y[col-1]]\n route2 = alignment_matrix[row-1][col] + scoring_matrix[seq_x[row -1]][\"-\"]\n route3 = alignment_matrix[row][col-1] + scoring_matrix[\"-\"][seq_y[col-1]]\n possible_score = max(route1, route2, route3)\n if global_flag:\n alignment_matrix[row][col] = possible_score\n else:\n alignment_matrix[row][col] = max(0, possible_score)\n\n #for row in alignment_matrix:\n # print row\n\n return alignment_matrix", "def TM_align(PU_name, ref_pdb_name, peel_longer):\n cmdLine_TM = (\"bin/TMalign64 results/\" + PU_name + '.pdb' +\n \" results/\" + ref_pdb_name + '.pdb' + \" -o \" + \"results/\" +\n PU_name + '.sup')\n\n out_TM = sub.Popen(cmdLine_TM.split(), stdout=sub.PIPE).communicate()[0]\n lines_TM = out_TM.decode()\n\n if peel_longer: # If peeled prot is longer, we get \"normalized by chain 2\"\n regex_TMalign = re.compile(\"(?:TM-score.+)([0]\\.[0-9]*)(?:.+Chain_2)\")\n else: # Else we get TMscore \"normalized by chain 1\"\n regex_TMalign = re.compile(\"(?:TM-score.+)([0]\\.[0-9]*)(?:.+Chain_1)\")\n searchObj = re.search(regex_TMalign, lines_TM)\n\n # Remove useless files:\n for ext in (\".sup_all_atm_lig\", \".sup_all\", \".sup\"):\n os.remove(\"results/\" + PU_name + ext)\n\n return float(searchObj.group(1))", "def _find_best_fit(self, puzzle):\n\n word = puzzle['answer']\n\n # if first word\n print(len(self.filled_pos))\n if len(self.filled_pos) == 0:\n x = random.randint(0,4)\n y = random.randint(0,4)\n print(\"first_word: {} x:{} y:{}\".format(word, x, y))\n print(\"will_fit: {}\".format(will_fit[ACROSS](x, y, length(word, self.lang))))\n if will_fit[ACROSS](x, y, length(word, self.lang)):\n puzzle['orientation'] = \"across\"\n # puzzle['position'] = t + 1\n puzzle['startx'] = x + 1\n puzzle['starty'] = y + 1\n self._fill_word_in_matrix(word, ACROSS, (x,y))\n return puzzle\n\n # first find the location where it overlaps.. 
then move to the other ones to keep it interesting\n for key in self.filled_pos:\n #the orientation for this word should be perpendicular to the one we are trying to match\n pos = int(not self.filled_pos[key]['orientation'])\n # find the intersecting letters between the two words\n intersect = find_intersection(key, word, self.lang)\n print(\"trying to intersect filled_word={} with word={}\".format(key, word))\n if len(intersect) == 0:\n # no letters matched.. lets find the next\n continue\n else:\n a = [-10, -10]\n print(\"intersecting letters={}\".format(intersect))\n for letter in intersect:\n indexes1 = find_all_char_pos(key, letter, self.lang)\n for index in indexes1:\n # index = filled_pos[key]['word'].find(letter)\n print(\"location of the letter={} in word={} is {}\".format(letter, key, index))\n filled_word_pos = self.filled_pos[key]['position']\n a[pos] = filled_word_pos[pos] + index\n indexes2 = find_all_char_pos(word, letter, self.lang)\n for index2 in indexes2:\n # index2 = word.find(letter)\n print(\"location of the letter={} in word={} is {}\".format(letter, word, index2))\n a[self.filled_pos[key]['orientation']] = filled_word_pos[int(not pos)] - index2\n print(\"looking for match in location={}\".format(a))\n print(\"will_fit={}\".format(will_fit[pos](a[0], a[1], length(word, self.lang))))\n if will_fit[pos](a[0], a[1], length(word, self.lang)):\n if not self._check_overlap(word, pos, a[0], a[1]):\n self._fill_word_in_matrix(word, pos, (a[0], a[1]))\n calculate_free_rows(self.puzzle_matrix, self.height)\n puzzle['orientation'] = \"down\" if pos else \"across\"\n # puzzle['position'] = t + 1\n puzzle['startx'] = a[0] + 1\n puzzle['starty'] = a[1] + 1\n return puzzle\n # if we are still here then we havent found a place for this word\n # fill it in an empty space\n free_blocks_across = calculate_free_rows(self.puzzle_matrix, self.height)\n print(\"@@@@@@filling a random across free_blocks_across={}\".format(free_blocks_across))\n for key, val in sorted(free_blocks_across.items()):\n print(\"key={} val={}\".format(key, val))\n if key >= length(word, self.lang):\n pos = val.pop(random.randint(0, len(val)-1 ))\n if will_fit[ACROSS](pos[0], pos[1], length(word, self.lang)) and not self._check_overlap(word, ACROSS, pos[0], pos[1]):\n self._fill_word_in_matrix(word, ACROSS, (pos))\n puzzle['orientation'] = \"across\"\n puzzle['startx'] = pos[0] + 1\n puzzle['starty'] = pos[1] + 1\n return puzzle", "def needleman_wunsch(\n seq1, seq2, match=1, mismatch=-1, gap_open=-5, gap_extend=-3, at_genome_start=False\n):\n alignments = pairwise2.align.globalms(\n seq1,\n seq2,\n match,\n mismatch,\n gap_open,\n gap_extend,\n )\n # Alignments is a list of tuples. Each tuple has length 5. 
Entries:\n # 0: seq1 alignment (ie with dashes for indels)\n # 1: seq2 alignemnt\n # 2: alignment score\n # 4, 5: don't know (not using them)\n if len(alignments) == 1:\n return alignments[0][0], alignments[0][1]\n\n if at_genome_start:\n best_pos = last_gap_end_in_string(alignments[0][1])\n else:\n best_pos = alignments[0][1].find(\"-\")\n\n best = alignments[0]\n\n for a in alignments[1:]:\n if at_genome_start:\n gap_pos = last_gap_end_in_string(a[1])\n else:\n gap_pos = a[1].find(\"-\")\n\n if gap_pos > best_pos:\n best = a\n best_pos = gap_pos\n\n return best[0], best[1]", "def edit_distance(self):\n\n edit_dist = 0\n misaligned = False\n\n try:\n with open(self.output_file, 'r') as output_file, open(self.gt_file, 'r') as gt_file:\n\n out_lines = output_file.readlines()\n gt_lines = [g.strip() for g in gt_file.readlines()]\n\n num_symbols = 0\n bd = 0\n # Go through all lines (for polyphony)\n for i in range(len(out_lines)):\n # Skip comparing sequence staff line\n if 'Sequence staff' in gt_lines[i]:\n continue\n\n out_split = out_lines[i].split()\n gt_split = gt_lines[i].split()\n\n #print('Out:',out_split)\n #print('Gt:',gt_split)\n\n num_symbols += len(gt_split) # for calculating symbol error rate\n misaligned = 'misaligned' in out_lines[i] # for ensembling\n\n _a = [symbol for symbol in out_split if symbol != '\\n' and symbol != -1]\n _b = [symbol for symbol in gt_split if symbol != '\\n' and symbol != -1]\n\n ed = self.levenshtein(_a,_b)\n \n # Account for barline at end (don't use when checking CRNN output)\n #if ed == 1 and out_split[-1] == 'barline' and gt_split[-1] != 'barline':\n # ed = 0\n \n edit_dist += ed\n \n staff_num = (i + 1) // 2\n \n if ed == 1:\n pass\n #print(self.output_file)\n #print('Edit dist (staff #%d): %d' % (staff_num, ed))\n \n if _a[-1] == 'barline' and _b[-1] != 'barline' or \\\n _a[-1] != 'barline' and _b[-1] == 'barline':\n #print('Barline diff') \n # print(self.output_file)\n bd = 1\n #print(_a)\n #print(_b)\n \n\n '''\n if len(out_split) != len(gt_split):\n return 0\n\n for j in range(len(out_split)):\n # Treat slur and tie as equivalent\n if out_split[j] != gt_split[j] and\\\n ('slur' not in out_split[j] and 'tie' not in out_split[j]) and\\\n ('slur' not in gt_split[j] and 'tie' not in gt_split[j]):\n return 0\n '''\n except FileNotFoundError:\n print('Missing:',self.output_file, self.gt_file)\n return -1, 1, 0, False\n #print('Found:',self.output_file, self.gt_file)\n return edit_dist, num_symbols, bd, misaligned", "def gap_align(center, string_w):\n m = len(center)\n n = len(string_w)\n\n # Initialization; D[i][j][0] contains the max alignment score of the\n # ith prefix of v and the jth of w; D[i][j][1] contains the back pointer.\n D = [[(0, START) for _ in range(n + 1)] for _ in range(m + 1)]\n\n for i in range(1, m + 1):\n D[i][0] = (D[i - 1][0][0] + blosum['-', center[i - 1]], DELETE)\n\n for j in range(1, n + 1):\n D[0][j] = (D[0][j - 1][0] + blosum['-', string_w[j - 1]], INSERT)\n\n # Recurrence\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n delete = D[i-1][j][0] + blosum[center[i - 1], '-']\n substitute = D[i-1][j-1][0] + blosum[center[i - 1], string_w[j - 1]]\n # Set D[i][j] to the max of the recurrences\n if delete > substitute:\n D[i][j] = (delete, DELETE)\n else:\n D[i][j] = (substitute, SUBSTITUTE)\n\n i, j = m, n\n w_aligned = ''\n back_pointer = D[i][j][1]\n while back_pointer != START:\n if back_pointer == DELETE:\n i -= 1\n w_aligned = '-' + w_aligned\n\n elif back_pointer == SUBSTITUTE:\n i -= 1\n j -= 1\n w_aligned = 
string_w[j] + w_aligned\n\n \n back_pointer = D[i][j][1]\n \n return w_aligned", "def question1():\n \n # load sequences and scoring matrix\n score_matrix = read_scoring_matrix(PAM50_URL)\n human_eyeless = read_protein(HUMAN_EYELESS_URL)\n fruitfly_eyeless = read_protein(FRUITFLY_EYELESS_URL)\n \n # compute local alignment matrix\n align_matrix = student.compute_alignment_matrix(human_eyeless, fruitfly_eyeless, \n score_matrix, False)\n \n # compute local alignment score and sequences\n score, human_align, fruitfly_align = student.compute_local_alignment(human_eyeless, fruitfly_eyeless,\n score_matrix, align_matrix)\n \n print \"Score: \" + str(score)\n print \"Human: \" + human_align\n print \"FrFly: \" + fruitfly_align\n \n return", "def affine_align(x, y, p1, p2, g, s):\n #Create M, Ix, and Iy as Y x X matrices of 0's\n M = [[0]*(len(x)+1) for i in range(len(y)+1)]\n Ix = [[0]*(len(x)+1) for i in range(len(y)+1)]\n Iy = [[0]*(len(x)+1) for i in range(len(y)+1)]\n #Set up initial values for Ix and Iy\n #M infs along both axes\n for i in range(1, len(y)+1):\n M[i][0] = -math.inf\n for j in range(1, len(x)+1):\n M[0][j] = -math.inf\n #Ix: Aligning X with gap, horizontal move, infs along top row\n for i in range(0, len(y)+1):\n Ix[i][0] = -math.inf\n #Gap penalties along left column\n for j in range(1, len(x)+1):\n Ix[0][j] = -g if Ix[0][j-1] == -math.inf else Ix[0][j-1] - s\n #Iy: Aligning Y with gap, vertical move, infs along left column\n for j in range(0, len(x)+1):\n Iy[0][j] = -math.inf\n #Gap penalties along top row\n for i in range(1, len(y)+1):\n Iy[i][0] = -g if Iy[i-1][0] == -math.inf else Iy[i-1][0] - s\n #Populate remaining cells\n for i in range(1, len(y)+1):\n for j in range(1, len(x)+1):\n M[i][j] = max(M[i-1][j-1] + delta(x[j-1], y[i-1], p1, p2),\n Ix[i-1][j-1] + delta(x[j-1], y[i-1], p1, p2),\n Iy[i-1][j-1] + delta(x[j-1], y[i-1], p1, p2))\n Ix[i][j] = max(M[i][j-1] - g,\n Ix[i][j-1] - s)\n Iy[i][j] = max(M[i-1][j] - g,\n Iy[i-1][j] - s)\n #TRACEBACK\n x_ret=\"\"; y_ret=\"\"\n i = len(y); j = len(x)\n #Determine start matrix\n align_scores = (M[i][j], Iy[i][j], Ix[i][j])\n matrix_idx = align_scores.index(max(align_scores))\n #matrix_key will track the current matrix through the traceback\n matrix_key = [\"M\", \"Iy\", \"Ix\"][matrix_idx]\n while i > 0 and j > 0:\n #From M: Check diagonal moves back to all three matrices, align characters\n if matrix_key == \"M\":\n if M[i][j] == M[i-1][j-1] + p1 or M[i][j] == M[i-1][j-1] - p2:\n x_ret = x[j-1] + x_ret\n y_ret = y[i-1] + y_ret\n i -= 1; j -= 1\n matrix_key = \"M\"\n elif M[i][j] == Iy[i-1][j-1] + p1 or M[i][j] == Iy[i-1][j-1] - p2:\n x_ret = x[j-1] + x_ret\n y_ret = y[i-1] + y_ret\n i -= 1; j -= 1\n matrix_key = \"Iy\"\n elif M[i][j] == Ix[i-1][j-1] + p1 or M[i][j] == Ix[i-1][j-1] - p2:\n x_ret = x[j-1] + x_ret\n y_ret = y[i-1] + y_ret\n i -= 1; j -= 1\n matrix_key = \"Ix\"\n #From Iy: Check vertical move to Iy and M, align y character with x gap\n elif matrix_key == \"Iy\":\n if Iy[i][j] == M[i-1][j] - g:\n x_ret = \"_\" + x_ret\n y_ret = y[i-1] + y_ret\n i -= 1\n matrix_key = \"M\"\n elif Iy[i][j] == Iy[i-1][j] - s:\n x_ret = \"_\" + x_ret\n y_ret = y[i-1] + y_ret\n i -= 1\n matrix_key = \"Iy\"\n #From Ix: Check horizontal move to Ix and M, align x character with y gap\n elif matrix_key == \"Ix\":\n if Ix[i][j] == M[i][j-1] - g:\n x_ret = x[j-1] + x_ret\n y_ret = \"_\" + y_ret\n j -= 1\n matrix_key = \"M\"\n elif Ix[i][j] == Ix[i][j-1] - s:\n x_ret = x[j-1] + x_ret\n y_ret = \"_\" + y_ret\n j -= 1\n matrix_key = \"Ix\"\n 
#Finish sequence if edge was reached\n #i>0 means mach remaining characters in y with gaps in x\n if i > 0:\n x_ret = (\"_\"*i) + x_ret\n y_ret = y[0:i] + y_ret\n #j>0 means mach remaining characters in x with gaps in y\n if j > 0:\n x_ret = x[0:j] + x_ret\n y_ret = (\"_\"*j) + y_ret\n #Return alinged strings\n return (x_ret, y_ret)", "def _calc_multiple_alignment_score(wrapped_data : tuple) -> int: \n (start, finish) = wrapped_data \n score_sum = 0.\n for dna_record in tqdm(dna_sequences[start : finish + 1], total=(finish + 1 - start), desc=\"Training process\"):\n score_sum += self.aligner.score(seq, dna_record.seq)\n return score_sum", "def analize(slugs, parameters_for_align, alpha_variability, alpha2_variability, beta_variability):\n i = alpha_variability[0]\n bestI = 0\n bestResult = 0\n while i < alpha_variability[1]:\n print(\"ALPHA=\"+str(i))\n align.ALPHA = i\n align.align_particular(parameters_for_align)\n current=main(slugs, True, False)\n if current>bestResult:\n bestResult = current\n bestI = i\n i += alpha_variability[2]\n align.ALPHA = bestI\n i = alpha2_variability[0]\n bestI2 = 0\n bestResult2 = 0\n while i < alpha2_variability[1]:\n print(\"ALPHA2=\"+str(i))\n align.ALPHA2 = i\n align.align_particular(parameters_for_align)\n current=main(slugs, False, False)\n if current>bestResult2:\n bestResult2 = current\n bestI2 = i\n i += alpha2_variability[2]\n align.ALPHA2 = bestI2\n i = beta_variability[0]\n bestI3 = 0\n bestResult3 = bestResult2\n while i < beta_variability[1]:\n print(\"BETHA=\" + str(i))\n align.BETHA = i\n align.align_particular(parameters_for_align)\n current = main(slugs, False, False)\n if current > bestResult3:\n bestResult3 = current\n bestI3 = i\n i += beta_variability[2]\n print(\"Best ALPHA=\"+str(bestI))\n print(\"Best ALPHA2=\" + str(bestI2))\n print(\"Best BETHA=\" + str(bestI3))\n print(\"Best result=\" + str(bestResult3))", "def do_local_alignment(sequences, matrix, penalty):\n seq1 = '-' + sequences[0].Sequence\n seq2 = '-' + sequences[1].Sequence\n\n # scoring matrix initializer\n scoring = local_setup(len(seq1), len(seq2))\n\n # fill scoring matrix\n aa_start = ord('A')\n for i in range(1, len(seq1)):\n aa_x = seq1[i]\n for j in range(1, len(seq2)):\n aa_y = seq2[j]\n xgap = scoring[i][j-1] - penalty\n ygap = scoring[i-1][j] - penalty\n match = scoring[i-1][j-1] + \\\n matrix[ord(aa_x) - aa_start][ord(aa_y) - aa_start]\n\n # store the max score (including 0)\n scoring[i].append(max([xgap, ygap, match, 0]))\n\n # find the max score (only the last max score)\n max_i, max_j, max_score = 0, 0, -float('inf')\n for i in range(len(scoring)):\n for j in range(len(scoring[i])):\n if scoring[i][j] > max_score:\n max_i, max_j, max_score = i, j, scoring[i][j]\n\n # perform traceback\n alignment = traceback(\n scoring, seq1, seq2, penalty, matrix, max_i, max_j, local=True\n )\n # Add the sequences to the scoring matrix for visualizing\n scoring = add_sequences_to_scoring(scoring, seq1, seq2)\n\n return alignment, scoring", "def align(self, *, skip_corners=False, return_on_invalid_result=False, warpwarnings=False, **kwargs):\n #load the images for all HPFs and keep them in memory as long as\n #the AlignSample is active\n self.getDAPI()\n self.logger.info(\"starting alignment\")\n\n weighted_sum_mse = 0.\n sum_weights = 0.\n done = set()\n\n for i, overlap in enumerate(self.overlaps, start=1):\n if skip_corners and overlap.tag in [1,3,7,9] :\n continue\n self.logger.debug(f\"aligning overlap {overlap.n} ({i}/{len(self.overlaps)})\")\n result = None\n #check 
if the inverse overlap has already been aligned\n #(e.g. if the current overlap is between (1, 2), check the overlap between (2, 1))\n #if so, we don't have to align again\n if self.inverseoverlapsdictkey(overlap) in done:\n inverseoverlap = self.overlapsdict[self.inverseoverlapsdictkey(overlap)]\n if hasattr(inverseoverlap, \"result\"):\n result = overlap.getinversealignment(inverseoverlap)\n #do the alignment\n if result is None:\n result = overlap.align(gputhread=self.gputhread, gpufftdict=self.gpufftdict, **kwargs)\n done.add(self.overlapsdictkey(overlap))\n\n #contribution of the mean squared difference after alignment\n #to the weighted sum\n if result is not None and result.exit == 0: \n w = (overlap.cutimages[0].shape[0]*overlap.cutimages[0].shape[1])\n weighted_sum_mse+=w*result.mse[2]\n sum_weights+=w\n else :\n if result is None:\n reason = \"is None\"\n else:\n reason = f\"has exit status {result.exit}\"\n if return_on_invalid_result :\n if warpwarnings: self.logger.warningglobal(f'Overlap number {i} alignment result {reason}: returning 1e10!!')\n return 1e10\n else :\n if warpwarnings: self.logger.warningglobal(f'Overlap number {i} alignment result {reason}: adding 1e10 to sum_mse!!')\n w = (overlap.cutimages[0].shape[0]*overlap.cutimages[0].shape[1])\n weighted_sum_mse+=w*1e10\n sum_weights+=w\n\n self.logger.info(\"finished align loop for \"+self.SlideID)\n return weighted_sum_mse/sum_weights", "def get_best_alignment_score(dna1, dna2, match = 1, mismatch = -1, gap = -2):\n if dna1 == '':\n return (gap*len(dna2), '-'*len(dna2) , dna2)\n if dna2 == '':\n return (gap*len(dna1), dna1 , '-'*len(dna1))\n \n best_case = list(get_best_alignment_score(dna1[1:], dna2[1:], match, \\\n mismatch, gap))\n best_case[0] = get_alignment_score(dna1[0],dna2[0], match, mismatch,\\\n gap) + best_case[0]\n best_case[1] = dna1[0] + best_case[1]\n best_case[2] = dna2[0] + best_case[2]\n \n best_case_attempt = list(get_best_alignment_score(dna1, dna2[1:], match, \\\n mismatch, gap))\n \n best_case_attempt[0] = get_alignment_score('-',dna2[0], match, mismatch,\\\n gap) + best_case_attempt[0]\n best_case_attempt[1] = '-' + best_case_attempt[1]\n best_case_attempt[2] = dna2[0] + best_case_attempt[2]\n\n if best_case[0] < best_case_attempt[0]:\n best_case = best_case_attempt\n\n best_case_attempt = list(get_best_alignment_score(dna1[1:], dna2, match, \\\n mismatch, gap))\n best_case_attempt[0] = get_alignment_score(dna1[0],'-', match, mismatch,\\\n gap) + best_case_attempt[0]\n best_case_attempt[1] = dna1[0] + best_case_attempt[1]\n best_case_attempt[2] = '-' + best_case_attempt[2]\n \n if best_case[0] < best_case_attempt[0]:\n best_case = best_case_attempt\n \n return tuple(best_case)", "def leveinshtein_distance(source,target):\r\n\t#Step 1\r\n\ts_len=len(source)\r\n\tt_len=len(target)\r\n\tcost=0\r\n\tif(s_len==0):\r\n\t\treturn t_len\r\n\tif(t_len==0):\r\n\t\treturn s_len\r\n\tprint(\"Dimensions:\\n\\tN:%d\\n\\tM:%d\"%(s_len,t_len))\r\n\t#Step 2\r\n\tmatrix=[[0 for _ in range(0,t_len+1)] for _ in range(0, s_len+1)]\r\n\t#Initialize first row 0..s_len\r\n\tfor idx in range(0,s_len+1):\r\n\t\tmatrix[idx][0]=idx\r\n\t#Initialize the first column 0..t_len\r\n\tfor idx in range(0, t_len+1):\r\n\t\tmatrix[0][idx]=idx\r\n\tprint(\"===Original===\")\r\n\tprint_matrix(matrix,source,target)\r\n\t#Step 3\r\n\tfor i in range(1,s_len+1):\r\n\t\tch=source[i-1]\r\n\t\t#print(ch)\r\n\t\t#Step 4\r\n\t\tfor j in range(1,t_len+1):\r\n\t\t\t#print(\">%s\"%target[j-1])\r\n\t\t\t#Step 5\r\n\t\t\tif 
ch==target[j-1]:\r\n\t\t\t\tcost=0\r\n\t\t\telse:\r\n\t\t\t\tcost=1\r\n\t\t\t#Step 6\r\n\t\t\t\r\n\t\t\t#print(\"(i,j)=>(%d,%d)\"%(i,j))\r\n\t\t\t#print(matrix[i][j])\r\n\t\t\tmatrix[i][j]=minimum(\r\n\t\t\t\tmatrix[i-1][j]+1,\r\n\t\t\t\tmatrix[i][j-1]+1,\r\n\t\t\t\tmatrix[i-1][j-1]+cost\r\n\t\t\t)\r\n\tprint(\"===Final Matrix===\")\r\n\tprint_matrix(matrix,source,target)\r\n\treturn matrix[s_len][t_len]", "def alignmentScore(self, optimizableVariables):\n from pytom.reconstruction.tiltAlignmentFunctions import markerResidual, refMarkerResidualForTiltImage as refResidual\n import numpy\n self.setOptimizableVariables(self.TiltSeries_._TiltAlignmentParas, optimizableVariables)\n\n if self.TiltSeries_._TiltAlignmentParas.leastsq == True:\n score = markerResidual(self.TiltSeries_._TiltAlignmentParas.cent,\n Markers_=self._Markers,\n cTilt=self._cTilt, sTilt=self._sTilt,\n transX=self._alignmentTransX, transY=self._alignmentTransY,ireftilt=self.ireftilt,\n rotInPlane=self._alignmentRotations,irefmark=self.irefmark, tiltangles=self._tiltAngles,\n isoMag=self._alignmentMagnifications, dBeam=self._alignmentBeamTilt,\n dMagnFocus=None, dRotFocus=None, equationSet=True)\n else:\n score = markerResidual(self.TiltSeries_._TiltAlignmentParas.cent,\n Markers_=self._Markers,\n cTilt=self._cTilt, sTilt=self._sTilt,\n transX=self._alignmentTransX, transY=self._alignmentTransY,\n rotInPlane=self._alignmentRotations,\n isoMag=self._alignmentMagnifications, dBeam=self._alignmentBeamTilt,\n dMagnFocus=None, dRotFocus=None, equationSet=False)\n self.sum_called += 1\n\n # for n, q in enumerate(optimizableVariables[-len(self._sTilt):]):\n # score += self.q[n] * refResidual(self.TiltSeries_._TiltAlignmentParas.cent,\n # Marker=self._Markers[self.TiltSeries_._TiltAlignmentParas.irefmark],\n # cTilt=self._cTilt, sTilt=self._sTilt,\n # transX=self._alignmentTransX, transY=self._alignmentTransY,\n # rotInPlane=self._alignmentRotations, iproj=n,\n # isoMag=self._alignmentMagnifications, dBeam=self._alignmentBeamTilt,\n # dMagnFocus=None, dRotFocus=None, equationSet=False)\n\n #print(numpy.sqrt(score))\n return score", "def readalign(self, opt, fh):\n## print \"entering readalign:\", opt\n edgeInfo = {}\n for p in opt:\n (key, value) = p.split('=')\n edgeInfo[key] = value\n\n s = fh.readline().split()\n## print s;\n if(len(s) == 7 and s[0] == 's'):\n vseq = self._vseq(len(s[6]))\n self.mAlign += vseq\n while len(s) == 7 and s[0] == 's':\n # Add the sequence name to the dictionary,\n # then add a corresponding node to the mapping.\n if s[1] not in self.sequences:\n self.sequences[s[1]] = AnonSequence(int(s[5]), s[1])\n self.mAlign += self.sequences[s[1]]\n\n # PROCESS THE KNOWN INTERVALS\n if(s[4] == '-'):\n ns = self.sequences[s[1]][-int(s[2]):-int(s[2]) - int(s[3])]\n self.sequences[s[1]].seqsplice(reverse_complement(\n s[6].replace('-', '')), ns.start, ns.stop)\n else:\n ns = self.sequences[s[1]][int(s[2]):int(s[2]) + int(s[3])]\n self.sequences[s[1]].seqsplice(s[6].replace('-', ''),\n ns.start, ns.stop)\n\n for inter in refIntervals(s[6]):\n self.mAlign[vseq[inter[0]:inter[1]]][ns[inter[2]:inter[3]]] = \\\n (inter[4])\n self.mAlign[ns[inter[2]:inter[3]]][vseq[inter[0]:inter[1]]] = \\\n (inter[4])\n\n s = fh.readline().split()", "def align(args) :\n from aligner import align_reads\n align_reads(args)", "def em_algorithm(self,bitext,max_iter = 5):\n max_iterations = max_iter\n f_words,e_words = self.get_words(bitext) # get vocabulary in each language from the corpus\n fw_count = len(f_words)\n ew_count = 
len(e_words)\n t_prob = self.inital_probabilities(fw_count,ew_count) # inital translation probabilities\n iteration_count = 0\n converged = False\n alignments,sentence_alignments = self.get_corpus_alignments(bitext,f_words,e_words)\n while not converged and iteration_count < max_iterations:\n #find alignment probabilities\n t_prob_prev = deepcopy(t_prob) ## copying the previous iteeration probability\n ## Expectation Step\n a_prob = self.get_alignment_prob(alignments,sentence_alignments,t_prob)\n ## Maximization Step -Finds new translation Probabilities\n self.update_fractional_counts(alignments,a_prob,t_prob,sentence_alignments)\n iteration_count += 1\n converged = self.is_converged(t_prob,t_prob_prev)\n self.alignment_prob = a_prob\n print(\"Algorithm converged after \",iteration_count,\" iterations\")\n self.translation_table = t_prob\n chosen_a_idxs = self.get_final_alignments(a_prob,sentence_alignments)\n self.alignment_words,self.alignment_idx = self.formatted_alignments(chosen_a_idxs,bitext,alignments,e_words,f_words)", "def do_global_alignment(sequences, matrix, penalty):\n seq1 = '-' + sequences[0].Sequence\n seq2 = '-' + sequences[1].Sequence\n\n # scoring matrix initializer\n scoring = global_setup(len(seq1), len(seq2), penalty)\n\n # fill scoring matrix\n aa_start = ord('A')\n for i in range(1, len(seq1)):\n aa_x = seq1[i]\n for j in range(1, len(seq2)):\n aa_y = seq2[j]\n xgap = scoring[i][j-1] - penalty\n ygap = scoring[i-1][j] - penalty\n match = scoring[i-1][j-1] + \\\n matrix[ord(aa_x) - aa_start][ord(aa_y) - aa_start]\n\n # store the max value of them all\n scoring[i].append(max([xgap, ygap, match]))\n\n # Perform traceback\n alignment = traceback(scoring, seq1, seq2, penalty, matrix)\n # Add the sequences to the scoring matrix for visualizing\n scoring = add_sequences_to_scoring(scoring, seq1, seq2)\n\n return alignment, scoring", "def equalize(pair, bias_axis, word_to_vec_map):\n\n ### START CODE HERE ###\n # Step 1: Select word vector representation of \"word\". Use word_to_vec_map. 
(≈ 2 lines)\n w1, w2 = pair\n e_w1, e_w2 = (word_to_vec_map[w1], word_to_vec_map[w2])\n\n # Step 2: Compute the mean of e_w1 and e_w2 (≈ 1 line)\n mu = (e_w1 + e_w2) / 2\n\n # Step 3: Compute the projections of mu over the bias axis and the orthogonal axis (≈ 2 lines)\n mu_B = np.dot(mu, bias_axis) / np.sum(np.dot(bias_axis, bias_axis)) * bias_axis\n mu_orth = mu - mu_B\n\n # Step 4: Use equations (7) and (8) to compute e_w1B and e_w2B (≈2 lines)\n e_w1B = np.dot(e_w1, bias_axis) / np.sum(np.dot(bias_axis, bias_axis)) * bias_axis\n e_w2B = np.dot(e_w2, bias_axis) / np.sum(np.dot(bias_axis, bias_axis)) * bias_axis\n\n # Step 5: Adjust the Bias part of e_w1B and e_w2B using the formulas (9) and (10) given above (≈2 lines)\n corrected_e_w1B = np.sqrt(np.abs(1 - np.sum(np.dot(mu_orth, mu_orth)))) * (e_w1B - mu_B) / np.sqrt(\n np.sum(np.dot(e_w1 - mu_orth - mu_B, e_w1 - mu_orth - mu_B)))\n corrected_e_w2B = np.sqrt(np.abs(1 - np.sum(np.dot(mu_orth, mu_orth)))) * (e_w2B - mu_B) / np.sqrt(\n np.sum(np.dot(e_w2 - mu_orth - mu_B, e_w2 - mu_orth - mu_B)))\n\n # Step 6: Debias by equalizing e1 and e2 to the sum of their corrected projections (≈2 lines)\n e1 = corrected_e_w1B + mu_orth\n e2 = corrected_e_w2B + mu_orth\n\n ### END CODE HERE ###\n\n return e1, e2", "def dict_judge1(_str1):\n\tglobal final_output\n\tif _str1==\"\":\n\t\treturn 'Finished.'\n\t_list0=dict_check34(_str1)\n\t#Judge1: Longest\n\t_list=[]\n\t_list1=[]\n\tfor i in range(len(_list0)):\n\t\tn=0\n\t\tfor j in range(3):\n\t\t\tn+=len(_list0[i][j])\n\t\t_list.append(n)\n\n\t_max=max(_list)\n\tfor i in range(len(_list0)):\n\t\tif _list[i]==_max:\n\t\t\twhile '' in _list0[i]:\n\t\t\t\t_list0[i].remove('')\n\t\t\tif not _list0[i] in _list1:\n\t\t\t\t_list1.append(_list0[i])\n\n\t#Judge2: Max Average Length\n\tif len(_list1)==1:\n\t\t_list2=_list1\n\telse:\n\t\t_list=[]\n\t\t_list2=[]\n\t\tfor i in range(len(_list1)):\n\t\t\tn=0\n\t\t\tfor j in range(len(_list1[i])):\n\t\t\t\tn+=len(_list1[i][j])\n\t\t\t_list.append(n/len(_list1[i]))\n\n\t\t_max=max(_list)\n\t\tfor i in range(len(_list1)):\n\t\t\tif _list[i]==_max:\n\t\t\t\t_list2.append(_list1[i])\n\n\t#Judge3: Take Variance for guarantee they're same patern\n\tif len(_list2)==1:\n\t\t_list3=_list2\n\telse:\n\t\t_list=[]\n\t\t_list3=[]\n\t\tfor i in range(len(_list2)):\n\t\t\tn=0\n\t\t\tfor j in range(len(_list2[i])):\n\t\t\t\tn+=len(_list2[i][j])**2\n\t\t\t_list.append(n/len(_list2[i]))\n\n\t\t_max=max(_list)\n\t\tfor i in range(len(_list2)):\n\t\t\tif _list[i]==_max:\n\t\t\t\t_list3.append(_list2[i])\n\n\t#Judge4: Single Word Frequency\n\tif len(_list3)==1:\n\t\t_list4=_list3\n\telse:\n\t\t_min=4\n\t\tfor i in range(len(_list3)):\n\t\t\tfor j in range(len(_list3[i])):\n\t\t\t\tif len(_list3[i][j])<_min:\n\t\t\t\t\t_min=len(_list3[i][j])\n\t\t_list=[]\n\t\t_list4=[]\n\t\tfor i in range(len(_list3)):\n\t\t\tn=0\n\t\t\tfor j in range(len(_list3[i])):\n\t\t\t\tif len(_list3[i][j])==_min:\n\t\t\t\t\tn+=_dict_ori[_list3[i][j]]\n\t\t\t_list.append(n)\n\n\t\t_max=max(_list)\n\t\tfor i in range(len(_list3)):\n\t\t\tif _list[i]==_max:\n\t\t\t\t_list4.append(_list3[i])\n\n\t#Output\n\tif len(_list4)!=1:\n\t\t_list4=_list4[0]\n\tif len(''.join(_list4[0]))==len(_str1):\n\t\tfinal_output=final_output+(' '.join(_list4[0]))\n\telse:\n\t\tfinal_output=final_output+_list4[0][0]+' '\n\t\tdict_judge1(_str1[len(_list4[0][0]):])", "def beam_search_stack_decode(self, src_sentence):\n for i in xrange(len(src_sentence)):\n for j in xrange(i+1, len(src_sentence)):\n foreign_phrase = src_sentence[i:j]\n 
best_phrase = \"\"\n highest_prob = float('-inf')\n for native_phrase, translation_probability in self.phrase_table[foreign_phrase]:\n prob = translation_probability + self.language_model.score(native_phrase.split())\n if prob > highest_prob:\n highest_prob = prob\n best_phrase = native_phrase\n self.heuristic_table[i][j] = (best_phrase, highest_prob)\n\n\n\n hypStacks = [ HypoStack() for i in len(src_sentence)]\n \"\"\"\n What will the cost of NULL Hypothesis be? Right now I don't know what it calculates but I'm enqueueing it with FLT_MAX\n \"\"\"\n hypStacks[0].add(Hypothesis([], '0' * len(src_sentence), 0, 0, None, self.heuristic_table, self.language_model, self.phrase_table), float('inf'))\n\n for hypStack in hypStacks:\n while len(hypStack) > 0:\n hyp = hypStack.pop()\n new_hyps = self.derive_new_hyps(hyp)\n for new_hyp in new_hyps:\n nf_new_hyp = new_hyp.covering.count('1')\n \"\"\"\n DONE\n For recombination, whenever you add a new hypothesis, look at the other hypotheses in the same stack and do this:\n – same number of foreign words translated\n – same last three English words in output\n – same last foreign word translated\n \"\"\"\n new_hyp_trigram = new_hyp.get_trigram()\n new_hyp_last_word = src_sentence[new_hyp.fp_end]\n add_new_hyp = True\n for other_hyp in hypStacks[nf_new_hyp]:\n if other_hyp.get_trigram() == new_hyp_trigram and src_sentence[other_hyp.fp_end] == new_hyp_last_word:\n if other_hyp.cost < new_hyp.cost:\n add_new_hyp = False\n break\n else:\n hypStacks[nf_new_hyp].remove(other_hyp)\n if not add_new_hyp: continue\n hypStacks[nf_new_hyp].add(new_hyp, new_hyp.cost)\n if len(hypStacks[nf_new_hyp]) > MAX_STACK_LEN:\n hypStacks[nf_new_hyp].remove_worst()\n return hypStacks[-1].pop()", "def ALIGNF(km_list, ky):\n n_feat = len(km_list)\n\n #km_list_copy = []\n # center the kernel first\n #for i in range(n_feat):\n # km_list_copy.append(center(km_list[i].copy()))\n #ky_copy = center(ky.copy())\n\n\n a = np.zeros(n_feat)\n for i in range(n_feat):\n a[i] = f_dot(km_list[i], ky)\n\n M = np.zeros((n_feat, n_feat))\n for i in range(n_feat):\n for j in range(i,n_feat):\n M[i,j] = f_dot(km_list[i],km_list[j])\n M[j,i] = M[i,j]\n\n Q = 2*M\n C = -2*a\n\n Q = Q + np.diag(np.ones(n_feat)*1e-8)\n\n ################################################\n # Using mosek to solve the quadratice programming\n\n # Set upper diagonal element to zeros, mosek only accept lower triangle\n iu = np.triu_indices(n_feat,1)\n Q[iu] = 0\n\n # start solving with mosek\n inf = 0.0\n env = mosek.Env()\n env.set_Stream(mosek.streamtype.log, streamprinter)\n\n # Create a task \n task = env.Task()\n task.set_Stream(mosek.streamtype.log, streamprinter)\n\n # Set up bound for variables \n bkx = [mosek.boundkey.lo]* n_feat\n blx = [0.0] * n_feat\n #bkx = [mosek.boundkey.fr]* n_feat\n #blx = [-inf] * n_feat\n bux = [+inf] * n_feat\n\n numvar = len(bkx)\n\n task.appendvars(numvar)\n\n for j in range(numvar):\n task.putcj(j,C[j])\n task.putvarbound(j,bkx[j],blx[j],bux[j])\n\n # Set up quadratic objective \n inds = np.nonzero(Q)\n qsubi = inds[0].tolist()\n qsubj = inds[1].tolist()\n qval = Q[inds].tolist()\n\n # Input quadratic objective \n task.putqobj(qsubi,qsubj,qval)\n\n # Input objective sense (minimize/mximize) \n task.putobjsense(mosek.objsense.minimize)\n\n task.optimize()\n\n # Print a summary containing information \n # about the solution for debugging purposes \n task.solutionsummary(mosek.streamtype.msg)\n\n solsta = task.getsolsta(mosek.soltype.itr)\n if (solsta == mosek.solsta.optimal or\n solsta 
== mosek.solsta.near_optimal):\n # Output a solution \n xx = np.zeros(numvar, float)\n task.getxx(mosek.soltype.itr, xx)\n #xx = xx/np.linalg.norm(xx)\n return xx\n else:\n print(solsta)\n xx = np.zeros(numvar, float)\n task.getxx(mosek.soltype.itr, xx)\n #xx = xx/np.linalg.norm(xx)\n return xx", "def _get_best_grading():\n possible_main_grading = np.array([10, 20, 25, 40, 50,\n 100, 200, 250, 400,\n 500, 1000])\n total_dist = length * dpp\n # Get the most suitable grading\n num_gradings = total_dist / possible_main_grading\n best_grading = possible_main_grading[num_gradings <= 7][0]\n best_num = num_gradings[num_gradings <= 7][0]\n dist = np.arange(best_num) * best_grading\n pos = (dist / dpp).astype(int)\n texts = [\"{0:d}\".format(int(d)) for d in dist]\n return pos, texts", "def align(s0, s1, backptr): # Arranges them correctly to get the best matching of letters\r\n\r\n # Empty array to fill in and return\r\n result = ['','']\r\n # Turn the strings into easier-to-handle arrays\r\n x0 = [char for char in s0]\r\n x1 = [char for char in s1]\r\n # empty arrays to put the aligned letters and the spaces into\r\n ress0 = []\r\n ress1 = []\r\n\r\n rows = len(s0)\r\n columns = len(s1)\r\n\r\n # Create the bottom values in the backptr matrix so that we know when we are done\r\n backptr[0][0][0] = -1\r\n backptr[0][0][1] = -1\r\n\r\n # initial values for where in the backptr matrix we are and where we came from\r\n staterow = rows\r\n statecol = columns\r\n r = staterow\r\n c = statecol\r\n # it follows the pointers until it reaches the bottom\r\n while staterow >=0 and statecol >=0:\r\n # If it points diagonally\r\n if backptr[staterow][statecol][0] == staterow-1 and backptr[staterow][statecol][1] == statecol-1 :\r\n if staterow == 0 and statecol == 0: # if it is at the bottom, pass\r\n pass\r\n else:\r\n # otherwise store both letters at the same position in result\r\n ress0.insert(0,x0[staterow-1])\r\n ress1.insert(0,x1[statecol-1])\r\n # If it points left (same row)\r\n if backptr[staterow][statecol][0] == staterow:\r\n if staterow == 0 and statecol == 0: # if it is at the bottom, pass\r\n pass\r\n else:\r\n # otherwise put a space in the row and insert the letter in the column\r\n ress0.insert(0,\" \")\r\n ress1.insert(0,x1[statecol-1])\r\n # If it points up (same column)\r\n if backptr[staterow][statecol][1] == statecol:\r\n if staterow == 0 and statecol == 0: # if it is at the bottom, pass\r\n pass\r\n else:\r\n # otherwise put a space in the column and insert the letter in the row\r\n ress0.insert(0,x0[staterow-1])\r\n ress1.insert(0,\" \")\r\n\r\n # so as not to overwrite staterow in the new initialization\r\n r = staterow\r\n c = statecol\r\n # initialize the new state (follow the pointer)\r\n staterow = backptr[r][c][0]\r\n statecol = backptr[r][c][1]\r\n\r\n # the print function wants the string reversed....\r\n ress0.reverse()\r\n ress1.reverse()\r\n # concatenate the arrays back into a string\r\n sum0 =''.join(ress0)\r\n sum1 =''.join(ress1)\r\n # put them into the result\r\n result[0]=sum0\r\n result[1]=sum1\r\n\r\n return(result)", "def process_align(self):\n\t\tstm_t_dict = self._process_recog()\n\t\ttrans_t_dict = self._process_trans()\n\t\talign_obj = viterbi_align(stm_t_dict, trans_t_dict, self.label, self.pair_file_path)\n\t\tself.trans_t_dict = align_obj.viterbi(0, len(stm_t_dict)-1, 0, len(trans_t_dict)-1)", "def align(self):\n ...", "def compute_alignments(\n model: torch.nn.Module,\n dl: torch.utils.data.DataLoader,\n params: AttributeDict,\n graph_compiler: BpeCtcTrainingGraphCompiler,\n) -> List[Tuple[str, 
List[int]]]:\n try:\n num_batches = len(dl)\n except TypeError:\n num_batches = \"?\"\n num_cuts = 0\n\n device = graph_compiler.device\n ans = []\n for batch_idx, batch in enumerate(dl):\n feature = batch[\"inputs\"]\n\n # at entry, feature is [N, T, C]\n assert feature.ndim == 3\n feature = feature.to(device)\n\n supervisions = batch[\"supervisions\"]\n\n cut_ids = []\n for cut in supervisions[\"cut\"]:\n assert len(cut.supervisions) == 1\n cut_ids.append(cut.id)\n\n nnet_output, encoder_memory, memory_mask = model(feature, supervisions)\n # nnet_output is [N, T, C]\n supervision_segments, texts = encode_supervisions(\n supervisions, subsampling_factor=params.subsampling_factor\n )\n # we need also to sort cut_ids as encode_supervisions()\n # reorders \"texts\".\n # In general, new2old is an identity map since lhotse sorts the returned\n # cuts by duration in descending order\n new2old = supervision_segments[:, 0].tolist()\n cut_ids = [cut_ids[i] for i in new2old]\n\n token_ids = graph_compiler.texts_to_ids(texts)\n decoding_graph = graph_compiler.compile(token_ids)\n\n dense_fsa_vec = k2.DenseFsaVec(\n nnet_output,\n supervision_segments,\n allow_truncate=params.subsampling_factor - 1,\n )\n\n lattice = k2.intersect_dense(\n decoding_graph,\n dense_fsa_vec,\n params.output_beam,\n )\n\n best_path = one_best_decoding(\n lattice=lattice,\n use_double_scores=params.use_double_scores,\n )\n\n ali_ids = get_alignments(best_path)\n assert len(ali_ids) == len(cut_ids)\n ans += list(zip(cut_ids, ali_ids))\n\n num_cuts += len(ali_ids)\n\n if batch_idx % 100 == 0:\n batch_str = f\"{batch_idx}/{num_batches}\"\n\n logging.info(\n f\"batch {batch_str}, cuts processed until now is {num_cuts}\"\n )\n\n return ans", "def main():\n long = give_long()\n short = give_short()\n similarity1 = find_similarity(long, short)\n print('The best match is '+similarity1+'.')", "def ratio(n1,n2, explain=0, optimize=False):\n weight_normal_form = 5.0 #distance between soundexes of normal form\n weight_normal_form_soundex = 8.0 #average distance between soundexes of normal form\n weight_geslachtsnaam1 = 10.0 #distance between soundexes of geslachtsnamen\n weight_geslachtsnaam2 = 10.0 #distance between geslachtsnaam\n weight_initials = 2 #distance between initials\n\n nf1 = n1.guess_normal_form()\n nf2 = n2.guess_normal_form()\n\n if not nf1 or not nf2:\n return 0.0\n elif nf1 == nf2:\n return 1.0\n ratio_normal_form = Similarity.average_distance(split(nf1), split(nf2))\n \n #create a simplified soundex set for this name\n #remove stopwords\n# nf1 = remove_stopwords( nf1)\n# nf2 = remove_stopwords( nf2)\n \n se1 = n1.get_normal_form_soundex()\n se2 = n2.get_normal_form_soundex()\n ratio_normal_form_soundex = Similarity.average_distance( se1, se2)\n \n #the geslachtsnaam (surname) is compared in two different ways\n g1 = n1.geslachtsnaam() #or n1.get_volledige_naam()\n g2 = n2.geslachtsnaam() #or n2.get_volledige_naam()\n g1 = to_ascii(g1)\n g2 = to_ascii(g2)\n if not optimize:\n #the soundexes of the surname are weighed in as well\n #g1_soundex = n1.soundex_nl(g1, group=2, length=-1)\n g1_soundex = n1.geslachtsnaam_soundex()\n #g2_soundex = n2.soundex_nl(g2, group=2, length=-1)\n g2_soundex = n2.geslachtsnaam_soundex()\n ratio_geslachtsnaam1 = Similarity.average_distance(g1_soundex, g2_soundex)\n else:\n ratio_geslachtsnaam1 = 1 \n weight_geslachtsnaam1 = 0\n \n #and the distance between the words in the surname itself\n ratio_geslachtsnaam2 = Similarity.average_distance(\n re.split('[ 
\\.\\,\\-]', g2.lower()),\n levenshtein_ratio)\n n1_initials = n1.initials()\n n1_initials_lower = n1_initials.lower()\n n2_initials = n2.initials()\n n2_initials_lower = n2_initials.lower()\n n1_contains_initials = n1.contains_initials()\n n2_contains_initials = n2.contains_initials()\n #count initials only if we have more than one\n #(or perhaps make this: if we know the first name)\n if len(n1_initials) == 1 or len(n2_initials) == 1:\n #initials count much less if there is only one\n weight_initials = weight_initials_if_one_name_consists_of_one_word_only\n# ratio_initials = .5\n ratio_initials = levenshtein_ratio(n1_initials_lower, n2_initials_lower)\n elif n1_contains_initials or n2_contains_initials:\n ratio_initials = levenshtein_ratio(n1_initials_lower, n2_initials_lower)\n weight_initials = weight_initials_if_one_name_is_in_initials\n elif len(n1_initials) > 1 and len(n2_initials) > 1:\n ratio_initials = levenshtein_ratio(n1_initials_lower, n2_initials_lower)\n else:\n ratio_initials = 0.7\n \n if n1_contains_initials or n2_contains_initials:\n weight_normal_form = weight_normal_form_if_one_name_is_in_initials \n weight_normal_form_soundex = weight_normal_form_soundex_if_one_name_is_in_initials\n\n counter = (ratio_normal_form * weight_normal_form +\n ratio_normal_form_soundex * weight_normal_form_soundex +\n ratio_geslachtsnaam1 * weight_geslachtsnaam1 +\n ratio_geslachtsnaam2 * weight_geslachtsnaam2 +\n ratio_initials * weight_initials)\n numerator = (weight_normal_form + weight_normal_form_soundex +\n weight_initials + weight_geslachtsnaam1 + weight_geslachtsnaam2)\n if numerator == 0:\n return 0.0\n final_ratio = counter/numerator\n\n if explain:\n s = '-' * 100 + '\\n'\n s += 'Naam1: %s [%s] [%s] %s\\n' % (n1, n1_initials, n1.guess_normal_form(), se1)\n s += 'Naam2: %s [%s] [%s] %s\\n' % (n2, n2_initials, n2.guess_normal_form(), se2)\n s += 'Similarity ratio: %s\\n' % final_ratio\n s += '--- REASONS' + '-' * 30 + '\\n'\n format_s = '%-30s | %-10s | %-10s | %-10s | %-10s | %s-10s\\n'\n s += format_s % ('\\t property', ' ratio', ' weight','relative_weight', ' r*w', 'r * relative_w')\n s += '\\t' + '-' * 100 + '\\n'\n format_s = '\\t%-30s | %-10f | %-10f | %-10f | %-10f | %-10f\\n'\n s += format_s % (' normal_form', ratio_normal_form, weight_normal_form,weight_normal_form/counter, ratio_normal_form * weight_normal_form, ratio_normal_form * weight_normal_form/counter)\n s += format_s % ('soundex van normal_form', ratio_normal_form_soundex, weight_normal_form_soundex,weight_normal_form_soundex/counter, ratio_normal_form_soundex* weight_normal_form_soundex, ratio_normal_form_soundex * weight_normal_form_soundex/counter)\n s += format_s % ('soundex van geslachtsnaam1', ratio_geslachtsnaam1, weight_geslachtsnaam1,weight_geslachtsnaam1/counter, ratio_geslachtsnaam1 * weight_geslachtsnaam1, ratio_geslachtsnaam1 * weight_geslachtsnaam1/counter)\n s += format_s % ('geslachtsnaam', ratio_geslachtsnaam2, weight_geslachtsnaam2,weight_geslachtsnaam2/counter, ratio_geslachtsnaam2 *weight_geslachtsnaam2 , ratio_geslachtsnaam2 * weight_geslachtsnaam2/counter)\n s += format_s % ('initials', ratio_initials, weight_initials, weight_initials/counter, ratio_initials *weight_initials, ratio_initials * weight_initials/counter)\n s += '\\tTOTAL (numerator) | %s (counter = %s)\\n' % (counter, numerator)\n \n return s\n return final_ratio", "def ComputeDistMatrix(dict_alignedSequences):\r\n \r\n # check if dictionary with keys as tuples containing integers and values as tuples containing strings\r\n check 
= True \r\n #1 Check Input is dict\r\n if isinstance(dict_alignedSequences, dict) == False:\r\n check = False\r\n \r\n #2 Check are the keys and values tuples. Do the keys only contain integers and the vlaues only strings\r\n i = 0\r\n while len(dict_alignedSequences) > i:\r\n #checking for keys and values as tuples\r\n if isinstance(list(dict_alignedSequences.keys())[i], tuple) == False or isinstance(list(dict_alignedSequences.values())[i], tuple) == False:\r\n check = False\r\n break\r\n #checking keys for integers\r\n if isinstance(list(dict_alignedSequences.keys())[i][0], int) == False or isinstance(list(dict_alignedSequences.keys())[i][1], int) == False:\r\n check = False\r\n break\r\n #checking values for strings\r\n if isinstance(list(dict_alignedSequences.values())[i][0], str) == False or isinstance(list(dict_alignedSequences.values())[i][1], str) == False:\r\n check = False\r\n break\r\n \r\n #increment the counter for while loop\r\n i += 1\r\n \r\n #3 Check sequences contain aligned DNA and are of equal length\r\n for key in dict_alignedSequences:\r\n if is_aligned_dna(dict_alignedSequences[key][0]) == False or is_aligned_dna(dict_alignedSequences[key][1]) == False:\r\n check = False\r\n break\r\n if len(dict_alignedSequences[key][0]) != len(dict_alignedSequences[key][1]):\r\n check = False\r\n break\r\n \r\n #final evalauation if data is usable\r\n if check == False:\r\n raise TypeError ('malformed input')\r\n \r\n #get number of sequences\r\n matrixdim = howmany_sequences(dict_alignedSequences)\r\n #initialize dist matrix\r\n distMatrix = init_Dist_Matrix(matrixdim)\r\n \r\n \r\n for i in dict_alignedSequences.keys():\r\n # useing the key i to get the corisponding aligned sequences \r\n seq = dict_alignedSequences[i]\r\n #calculate distances between the sequences\r\n distance = calculate_distance(seq[0],seq[1])\r\n #markdown result at the corrsiponding place in the distmatrix\r\n distMatrix[i[0]][i[1]] = distance\r\n distMatrix[i[1]][i[0]] = distance\r\n \r\n return(distMatrix)", "def _calculate_fscore(matching_char_n_grams: Dict[int, Tensor], matching_word_n_grams: Dict[int, Tensor], hyp_char_n_grams: Dict[int, Tensor], hyp_word_n_grams: Dict[int, Tensor], ref_char_n_grams: Dict[int, Tensor], ref_word_n_grams: Dict[int, Tensor], n_order: float, beta: float) ->Tensor:\n\n def _get_n_gram_fscore(matching_n_grams: Dict[int, Tensor], ref_n_grams: Dict[int, Tensor], hyp_n_grams: Dict[int, Tensor], beta: float) ->Dict[int, Tensor]:\n \"\"\"Get n-gram level f-score.\"\"\"\n precision: Dict[int, Tensor] = {n: (matching_n_grams[n] / hyp_n_grams[n] if hyp_n_grams[n] > 0 else tensor(0.0)) for n in matching_n_grams}\n recall: Dict[int, Tensor] = {n: (matching_n_grams[n] / ref_n_grams[n] if ref_n_grams[n] > 0 else tensor(0.0)) for n in matching_n_grams}\n denominator: Dict[int, Tensor] = {n: torch.max(beta ** 2 * precision[n] + recall[n], _EPS_SMOOTHING) for n in matching_n_grams}\n f_score: Dict[int, Tensor] = {n: ((1 + beta ** 2) * precision[n] * recall[n] / denominator[n]) for n in matching_n_grams}\n return f_score\n char_n_gram_f_score = _get_n_gram_fscore(matching_char_n_grams, ref_char_n_grams, hyp_char_n_grams, beta)\n word_n_gram_f_score = _get_n_gram_fscore(matching_word_n_grams, ref_word_n_grams, hyp_word_n_grams, beta)\n f_score = (sum(char_n_gram_f_score.values()) + sum(word_n_gram_f_score.values())) / tensor(n_order)\n return f_score", "def beam_search(X, u, w, b, relLabels):\n\n candidate_paths = [[] for _ in range(10)] # contains the candidate label sets\n candidate_vals 
=[[] for _ in range(10)] # contains the label values (-1/1) for each candidate set\n candidate_scores = [0. for _ in range(10)]\n min_score = -1000\n\n iter = 0\n start = 0\n while True:\n # print(\"Iter: \", iter)\n intermediate_paths = {}\n # intermediate_paths_val = []\n interim_scores = []\n hash_table = {}\n\n cnt_paths = 0\n for cp in range(5):\n labels_curr = candidate_paths[cp]\n labels_val_curr = candidate_vals[cp]\n scores_curr = candidate_scores[cp]\n Y = -np.ones((10, 1))\n for lv in range(len(labels_val_curr)):\n Y[labels_curr[lv]] = labels_val_curr[lv]\n\n for l in range(10):\n candidate_interim = labels_curr[:]\n candidate_vals_interim = labels_val_curr[:]\n # if l in labels_curr:\n # continue\n\n temp_relLabels = []\n for lc in range(len(labels_curr)):\n temp_relLabels.extend(relLabels[labels_curr[lc]])\n\n # temp_relLabels = np.array(list(set(temp_relLabels)))\n temp_relLabels = np.array(list(set(relLabels[l]).intersection(set(labels_curr))))\n model_pos = returnModelVal(X, Y, 1.0, u[l], u[l], b[l][0], np.array(temp_relLabels))\n candidate_interim.append(l)\n\n if model_pos < 0:\n # print('hello')\n candidate_vals_interim.append(-1)\n interim_scores.append(-model_pos)\n else:\n candidate_vals_interim.append(1)\n interim_scores.append(model_pos)\n\n hash_table[cnt_paths] = candidate_interim\n intermediate_paths[cnt_paths] = candidate_vals_interim\n cnt_paths += 1\n # For the first iteration, just iterate once - all labels in one iteration\n if start == 0:\n start = 1\n break\n\n temp_paths = intermediate_paths\n interim_zip = zip(intermediate_paths, interim_scores)\n sorted_scores = sorted(interim_zip, key=lambda x: x[1], reverse=True)[:5]\n intermediate_paths, scores = zip(*sorted_scores)\n\n temp_cand = []\n temp_val = []\n for i in range(len(intermediate_paths)):\n temp_cand.append(hash_table[intermediate_paths[i]])\n temp_val.append(temp_paths[intermediate_paths[i]])\n # candidate_scores[i] += scores[i]\n\n candidate_paths = temp_cand\n candidate_vals = temp_val\n print(candidate_paths)\n print(candidate_vals)\n # print(scores)\n # candidate_scores = scores\n\n # Exit condition from loop\n # if max(interim_scores) < min_score:\n # break\n #\n # min_score = min(interim_scores)\n\n iter += 1\n if iter > 5:\n break\n\n candidate_dict = {}\n for i in range(5):\n for c in range(len(candidate_paths[i])):\n if candidate_paths[i][c] not in candidate_dict:\n candidate_dict[candidate_paths[i][c]] = candidate_vals[i][c]\n elif candidate_dict[candidate_paths[i][c]] != 2:\n if candidate_dict[candidate_paths[i][c]] != candidate_vals[i][c]:\n candidate_dict[candidate_paths[i][c]] = 2.\n\n print(candidate_dict)\n exit()\n return candidate_dict", "def optimalize(): \n start = time()\n max = 0\n maxn=2\n maxm=3\n check = [(n,m) for n in range(24,30) for m in range(3,20)]\n dict = {}\n print \"start optimalization of: bigram-features,uniqueness\"\n for n,m in check:\n score=0\n print \">lem>>n(uniqueness):\"+str(n)\n print \">lem>>m(commonness):\"+str(m)\n wrds = common_but_unique(ngrams_dict(1,authors,compactcorpus,n,False),m)\n bigrams = common_but_unique(ngrams_dict(2,authors,compactcorpus,n,False),m)\n trigrams = common_but_unique(ngrams_dict(3,authors,compactcorpus,n,False),m)\n #pos_feat = [\"wrd:\"+wrd+\">\"+str(num) for wrd in wrds for num in range(0,1)]\n pos_feat = [\"bi:(\"+str(bi[0])+\",\"+str(bi[1])+\")>\"+str(num) for bi in bigrams for num in range(0,1)] + [\"wrd:\"+wrd+\">\"+str(num) for wrd in wrds for num in range(0,1)] + 
[\"tri:(\"+str(tri[0])+\",\"+str(tri[1])+\",\"+str(tri[2])+\")>\"+str(num) for tri in trigrams for num in range(0,1)]\n\n print \"number of features AFTER selection:\" + str(len(pos_feat))\n for x in range(0,4):\n data = split_train_test_data(authors, corp,45)\n train_set = [(feat_dict(pos_feat,d), c) for (d, c) in data[\"train\"]]\n train_set = [(feat_dict(pos_feat,d), c) for (d, c) in data[\"train\"]]\n test_set = [(feat_dict(pos_feat,d), c) for (d, c) in data[\"test\"]]\n classifier1 = NaiveBayesClassifier.train(train_set)\n acc = nltk.classify.accuracy(classifier1,test_set)\n print \"accuracy:\"+str(acc)\n score +=acc\n print \"time elapsed: \"+str(time()-start)\n print \"score(\" + str(n) +\")=\"+str(score/4)\n classifier1.show_most_informative_features(8)\n dict[(n,m)]=(score/4)\n if(score/4)>max:\n max = (score/4)\n maxn =n\n maxm = m\n print \"max score=\"+str(max)\n print \"where n = \"+str(maxn)\n print \"where m = \"+str(maxm)\n print \"time:\"+str(time()-start)\n writetofile(dict,\"optimalizedict_commonwrdsandbigrams_latest_lem.pkl\")", "def prf_align(prf1, prf2):\n\n Δ0,a0,b0 = guess_align_params(prf1, prf2)\n params = lmfit.Parameters()\n params.add('Δ', value=Δ0)\n params.add('a', value=a0)\n params.add('b', value=b0)\n \n prf_diff = prf_diff_fn(prf1, prf2)\n \n res = lmfit.minimize(prf_diff, params)\n \n Δ = res.params['Δ']\n a = res.params['a']\n b = res.params['b']\n \n return prf_shift_scale(prf2, Δ, a, b)", "def main():\r\n ## In argument hanterare\r\n parser = argparse.ArgumentParser(description='Aligner')\r\n group = parser.add_mutually_exclusive_group(required=True)\r\n group.add_argument('--file', '-f', type=str, nargs=2, help='align two strings')\r\n group.add_argument('--string', '-s', type=str, nargs=2, help='align the contents of two files')\r\n\r\n parser.add_argument('--check', action='store_true', help='check if your alignment is correct')\r\n\r\n arguments = parser.parse_args() # In argumenten skrivna i temrinalen\r\n\r\n\r\n### Behandlar in argumenetn och gör det till två strängar s0[] & s1[]\r\n## Eller om det är en url som skall checka om man gjort rätt.\r\n if arguments.file:\r\n f1, f2 = arguments.file\r\n with codecs.open(f1, 'r', 'utf-8') as f:\r\n s1 = f.read().replace('\\n', '')\r\n with codecs.open(f2, 'r', 'utf-8') as f:\r\n s2 = f.read().replace('\\n', '')\r\n\r\n elif arguments.string:\r\n s1, s2 = arguments.string\r\n\r\n if arguments.check:\r\n payload = json.dumps({\r\n 's1': s1,\r\n 's2': s2,\r\n 'result': align(s1, s2, compute_backpointers(s1, s2))\r\n })\r\n response = requests.post(\r\n 'https://language-engineering.herokuapp.com/correct',\r\n data=payload,\r\n headers={'content-type': 'application/json'}\r\n )\r\n response_data = response.json()\r\n if response_data['correct']:\r\n print_alignment( align(s1, s2, compute_backpointers(s1, s2)))\r\n print('Success! 
Your results are correct')\r\n else:\r\n print('Your results:\\n')\r\n print_alignment( align(s1, s2, compute_backpointers(s1, s2)))\r\n print(\"The server's results\\n\")\r\n print_alignment(response_data['result'])\r\n print(\"Your results differ from the server's results\")\r\n else:\r\n print_alignment( align(s1, s2, compute_backpointers(s1, s2)))", "def get_alignment_params(self, s, w):\n\n\n X1 = s.__get_X(w)\n X2 = self.__get_X(w)\n Y1 = s.__get_Y(w)\n Y2 = self.__get_Y(w)\n Z = self.__get_Z(w)\n W = sum(w)\n C1 = self.__get_C1(w, s)\n C2 = self.__get_C2(w, s)\n\n a = np.array([[ X2, -Y2, W, 0],\n [ Y2, X2, 0, W],\n [ Z, 0, X2, Y2],\n [ 0, Z, -Y2, X2]])\n\n b = np.array([X1, Y1, C1, C2])\n # Solve equations\n # result is [ax, ay, tx, ty]\n return np.linalg.solve(a, b)", "def alignScore():\n matrix = mapMatrix(\"BLOSUM62\")\n \n path = \"./data/\"\n for file in os.listdir(path):\n if file.endswith(\".fa\") or file.endswith(\".fasta\"):\n sequences = []\n input_sequences = SeqIO.parse(path + file, \"fasta\", \\\n IUPAC.protein)\n\n for record in input_sequences:\n seq = str(record.seq)\n sequences.append(seq) \n \n SumOfPairs = 0\n for pair in combinations(sequences, 2): \n SumOfPairs += pairwiseScore(pair[0], pair[1], matrix)\n \n print SumOfPairs", "def align(self):\n\n # load the alignment parameters into the align_params object\n self.align_params.load_params_from_file(self.input_file)\n\n # populate the score matrices based on the input parameters\n self.populate_score_matrices()\n\n # perform a traceback and write the output to an output file\n\n ### FILL IN ###", "def preprocessing():\n english_dictionary = nltk.corpus.brown.words()\n slang_vocab = pickle.load(open('vocab_pattern_match_with_freq.pkl', 'rb'))\n\n normalize_english_dict = len(english_dictionary)\n normalize_slang_vocab = 0\n for w, n in slang_vocab.items():\n normalize_slang_vocab += n\n\n words = {}\n for w, n in Counter(english_dictionary).items():\n words[w] = n/normalize_english_dict\n \n for w, n in slang_vocab.items():\n if w not in words:\n words[w] = 0.\n words[w] += n/normalize_slang_vocab\n\n words_by_freq = [w for w,_ in sorted(words.items(), key=lambda x: x[1], reverse=True)]\n\n # Build a cost dictionary, assuming Zipf's law and cost = -math.log(probability).\n #words = open(\"words_by_frequency.txt\").read().split()\n wordcost = dict((k, log((i+1)*log(len(words_by_freq)))) for i,k in enumerate(words_by_freq))\n maxword = max(len(x) for x in words_by_freq)\n return wordcost,maxword", "def most_similar_direction(self, positive=[], negative=[], fullVariable=[], topn=False, vecModel=None):\n self.init_sims()\n #logger = logging\n #logger.debug(\"\\t\\t========direction===========\") \n #logger.debug(\"d, c: %s\", positive) \n #logger.debug(\"b, a: %s\", negative) \n \n \n if isinstance(positive, string_types) and not negative:\n # allow calls like most_similar('dog'), as a shorthand for most_similar(['dog'])\n positive = [positive]\n\n # add weights for each word, if not already present; default to 1.0 for positive and -1.0 for negative words\n WordDict = {}\n for word in positive: \n if isinstance(word, string_types + (ndarray,)): # plus mean or type\n WordDict[word] = 1.0\n for word in negative:\n if isinstance(word, string_types + (ndarray,)):\n WordDict[word] = -1.0 \n\n all_words = set() # store question words\n #limited = [] # store d (i.e. 
topN words having closest distance with b - a + c from most_similiar)\n \n def getData(word):\n return WordDict[word], self.vocab[word].index, self.syn0norm[self.vocab[word].index] \n \n wordD_list =[]\n for i, item in enumerate(fullVariable):\n if i == 0:\n wordA = fullVariable[i]\n wordA_weight, wordAIndex, vecA = getData(wordA)\n all_words.add(wordAIndex)\n elif i == 1:\n wordB = fullVariable[i]\n wordB_weight, wordBIndex, vecB = getData(wordB)\n all_words.add(wordBIndex)\n elif i == 2:\n wordC = fullVariable[i]\n wordC_weight, wordCIndex, vecC = getData(wordC)\n all_words.add(wordCIndex)\n else: \n wordD_list.append(fullVariable[i])\n \n \n # add assessment item Validated version \n ##get all (d - b) . checked \n tempLimit = numpy.zeros(shape=(vecModel.shape)) #Quick Fix. Fake array to fit into Gensim format\n for word in wordD_list: # these are the predict ans (d) and b\n if isinstance(word, ndarray):\n sys.exit() # not yet fixed\n elif word in self.vocab:\n wordD_weight, wordDIndex, vecD = getData(word)\n vecD_weight = wordD_weight * vecD\n vecB_weight = wordB_weight * vecB\n tempLimit[wordDIndex] = matutils.unitvec(array([ vecD_weight, vecB_weight ]).mean(axis=0)).astype(REAL)\n else:\n raise KeyError(\"word '%s' not in vocabulary\" % word)\n \n # compute c - a \n if isinstance(wordC, ndarray) and isinstance(wordA, ndarray): \n sys.exit() # not yet fixed\n elif wordC in self.vocab and wordA in self.vocab:\n vecA_weight = wordA_weight * vecA \n vecC_weight = wordC_weight * vecC\n #mean = matutils.unitvec(array([ vecC_weight, vecA_weight ]).mean(axis=0)).astype(REAL) # mean of c - a \n indexText = wordA+\"-\"+wordC\n mean = SVD_model[indexText]\n #print indexText, mean[0:5]\n else:\n raise KeyError(\"word '%s' not in vocabulary\" % word) \n dists1 = dot(tempLimit, mean) # all row are zero, except those top10 word row \n resIdx = numpy.nonzero(dists1)[0].tolist()\n vecDict = {}\n for i in resIdx:\n vecDict[self.index2word[i]] = dists1[i]\n from collections import OrderedDict\n sorted_vecDict = OrderedDict(sorted(vecDict.items(), key=lambda t: t[1], reverse=True))\n\n # debug version : \n# tempLimit1 = numpy.zeros(shape=(vecModel.shape[0])) #Quick Fix. 
Fake array to fit into Gensim format\n# scoreSection= dict()\n# u = (vecC - vecA ) / (numpy.linalg.norm(vecC - vecA))\n# #print wordA, wordC, u\n# with open(\"temp.txt\", \"ab\") as text_file:\n# text_file.write(wordC +\"-\"+ wordA +\" \"+ \" \".join(str(item) for item in u) +\"\\n\")\n# text_file.close()\n# B_C = dot(vecB,vecC)\n# B_A = dot(vecB,vecA)\n# for word in wordD_list: # these are the predict ans (d) and b\n# if isinstance(word, ndarray):\n# # vecA = wordA\n# #limited.append(matutils.unitvec(array([ vecD, vecB ]).mean(axis=0)).astype(REAL)) # mean = algeria 1.0 vec, algiers -1.0 vec\n# sys.exit() # not yet fixed\n# elif word in self.vocab:\n# wordD_weight, wordDIndex, vecD = getData(word)\n# #print wordD_weight, wordDIndex, vecD[1:4] , word\n# D_C = dot(vecD,vecC)\n# D_A = dot(vecD,vecA)\n# D_B = dot(vecD,vecB)\n# C_A = dot(vecC,vecA)\n# totalScore = (D_C - D_A - B_C + B_A) / (numpy.linalg.norm(vecD - vecB) * numpy.linalg.norm(vecC - vecA))\n# totalScoreText = str(D_C) +\" \"+ str(D_B)+ \" \"+ str(D_A) +\" \"+\\\n# str(B_C) +\" \"+ str(C_A) +\" \" + str(B_A) +\" \" + str(totalScore)\n# tempLimit1[self.vocab[word].index] = totalScore\n# scoreSection[self.vocab[word].index] = totalScoreText\n# else:\n# raise KeyError(\"word '%s' not in vocabulary\" % word)\n# dists2 = tempLimit1\n# resIdx2 = numpy.nonzero(dists2)[0].tolist()\n# \n# vecDict2 = {}\n# for i in resIdx2:\n# vecDict2[self.index2word[i]] = scoreSection[i]\n# from collections import OrderedDict\n# sorted_vecDict2 = OrderedDict(sorted(vecDict2.items(), key=lambda t: t[1], reverse=True))\n# \n# # best_temp3 = matutils.argsort(dists2, topn=topn, reverse=True).tolist()\n# # best_temp0 = matutils.argsort(dists1, topn=topn, reverse=True).tolist()\n# # \n# # if best_temp0 != best_temp3:\n# # #print \"different argmax result:\", fullVariable\n# # print [item for item in best_temp0] \n# # print [item for item in best_temp3] \n# # sys.exit()\n# # else:\n# dists1 = dists2\n# sorted_vecDict = sorted_vecDict2\n \n # combine \n best1 = matutils.argsort(dists1, topn=topn, reverse=True)\n #print best1\n if any(dists1[i] <= 0 for i in best1):\n logger.debug(\"Direction_Bad: %s\", \\\n sorted_vecDict) \n writeList2File(\"direction.txt\", [\"000\"])\n return None\n \n # make sure result is a sorted version of the distance result , else quit \n import collections\n if collections.Counter(wordD_list) != \\\n collections.Counter([self.index2word[item] for item in best1]):\n logger.debug(\"Fail: %s\\n %s\\n\", \\\n wordD_list, len(wordD_list),\\\n [self.index2word[item] for item in best1], len([self.index2word[item] for item in best1])) \n sys.exit() \n \n logger.debug(\"Direction: %s\", \\\n [(self.index2word[sim], sorted_vecDict[self.index2word[sim]]) for sim in best1 if sim not in all_words]) \n writeList2File(\"direction.txt\", [self.index2word[sim] for sim in best1 if sim not in all_words]) \n return dists1", "def align(model,\n left,\n right,\n max_length = 512):\n inputs = preprocess(left, right, max_length)\n output = model(inputs)\n output = expand(output)\n scores, path, params = postprocess(output, len(left), len(right))\n return Alignment(left, right, scores, path, params)", "def _local_improvement(self, folded_design):\n differing_sites = _string_difference_indices(\n self.target.dot_bracket, folded_design\n )\n hamming_distances = []\n for mutation in product(\"AGCU\", repeat=len(differing_sites)):\n mutated = self.design.get_mutated(mutation, differing_sites)\n folded_mutated, _ = fold(mutated.primary)\n hamming_distance = 
hamming(folded_mutated, self.target.dot_bracket)\n hamming_distances.append(hamming_distance)\n if hamming_distance == 0: # For better timing results\n return 0\n return min(hamming_distances)", "def build_alignment(self,score,pieces):\n\t \t# build text\n\t\tself.open_seqs()\n\t\ttext1 = text2 = \"\"\n\t\tend1 = end2 = None\n\t\tfor (start1,start2,length,pctId) in pieces:\n\t\t\tif (end1 != None):\n\t\t\t\tif (start1 == end1): # insertion in sequence 2\n\t\t\t\t\ttext1 += self.seq1_gap * (start2-end2)\n\t\t\t\t\ttext2 += self.seq2_file.get(end2,start2-end2)\n\t\t\t\telse: # insertion in sequence 1\n\t\t\t\t\ttext1 += self.seq1_file.get(end1,start1-end1)\n\t\t\t\t\ttext2 += self.seq2_gap * (start1-end1)\n\n\t\t\ttext1 += self.seq1_file.get(start1,length)\n\t\t\ttext2 += self.seq2_file.get(start2,length)\n\t\t\tend1 = start1 + length\n\t\t\tend2 = start2 + length\n\t\t# create alignment\n\t\tstart1 = pieces[0][0]\n\t\tstart2 = pieces[0][1]\n\t\tend1 = pieces[-1][0] + pieces[-1][2]\n\t\tend2 = pieces[-1][1] + pieces[-1][2]\n\t\tsize1 = end1 - start1\n\t\tsize2 = end2 - start2\n\t\ta = Alignment(score=score,species_to_lengths=self.species_to_lengths)\n\t\t#if (self.seq1_strand == \"-\"): start1 = self.seq1_file.length - end1\n\t\ta.add_component(Component(self.seq1_src,start1,size1,self.seq1_strand,text=text1))\n\t\t#if (self.seq2_strand == \"-\"): start2 = self.seq2_file.length - end2\n\t\ta.add_component(Component(self.seq2_src,start2,size2,self.seq2_strand,text=text2))\n\t\treturn a", "def mas_width1(attn_map):\n # assumes mel x text\n opt = np.zeros_like(attn_map)\n attn_map = np.log(attn_map)\n attn_map[0, 1:] = -np.inf\n log_p = np.zeros_like(attn_map)\n log_p[0, :] = attn_map[0, :]\n prev_ind = np.zeros_like(attn_map, dtype=np.int64)\n for i in range(1, attn_map.shape[0]):\n for j in range(attn_map.shape[1]): # for each text dim\n prev_log = log_p[i - 1, j]\n prev_j = j\n\n if j - 1 >= 0 and log_p[i - 1, j - 1] >= log_p[i - 1, j]:\n prev_log = log_p[i - 1, j - 1]\n prev_j = j - 1\n\n log_p[i, j] = attn_map[i, j] + prev_log\n prev_ind[i, j] = prev_j\n\n # now backtrack\n curr_text_idx = attn_map.shape[1] - 1\n for i in range(attn_map.shape[0] - 1, -1, -1):\n opt[i, curr_text_idx] = 1\n curr_text_idx = prev_ind[i, curr_text_idx]\n opt[0, curr_text_idx] = 1\n return opt", "def findAligned(self,nbr_aligned):\n bVerbose = 0\n # horiz:\n for j in range(self.h):\n nPrev = -1\n nConsecutive = 0\n for i in range(self.w):\n val = self.world[j][i]\n if val == nPrev:\n nConsecutive += 1\n if nConsecutive == nbr_aligned and val != 0:\n return val\n else:\n nPrev = val\n nConsecutive = 1\n \n # vertic:\n for i in range(self.w):\n nPrev = -1\n nConsecutive = 0\n for j in range(self.h):\n val = self.world[j][i]\n if val == nPrev:\n nConsecutive += 1\n if nConsecutive == nbr_aligned and val != 0:\n return val\n else:\n nPrev = val\n nConsecutive = 1\n \n # for diags we decide of a starting point then we decay\n # diag \\\n for j in range(self.h-nbr_aligned+1):\n for i in range(self.w-nbr_aligned+1):\n k = 0\n nPrev = -1\n nConsecutive = 0\n while 1:\n if bVerbose: print(\"DBG: findAligned diag \\: testing %d,%d\" % (i+k,j+k))\n try:\n val = self.world[j+k][i+k]\n except IndexError:break\n \n if val == nPrev:\n nConsecutive += 1\n if nConsecutive == nbr_aligned and val != 0:\n return val\n else:\n nPrev = val\n nConsecutive = 1\n k += 1\n \n # diag /\n for j in range(self.h-nbr_aligned+1):\n for i in range(nbr_aligned-1,self.w):\n k = 0\n nPrev = -1\n nConsecutive = 0\n while 1:\n if bVerbose: 
print(\"DBG: findAligned diag /: testing %d,%d\" % (i-k,j+k))\n try:\n if i-k<0:\n break\n val = self.world[j+k][i-k]\n except IndexError:break\n \n if val == nPrev:\n nConsecutive += 1\n if nConsecutive == nbr_aligned and val != 0:\n return val\n else:\n nPrev = val\n nConsecutive = 1\n k += 1\n\n \n return 0", "def align_sequences_heterodimers(structure, option):\n\n\tchains = list(structure.get_chains())\n\tbest_aln = []\n\tn = len(chains)\n\n\tif option:\n\t\tsys.stderr.write(\"Align chain sequences: \\n\")\n\t#Select sequences to align\n\tfor i in range(n-1):\n\t\tid1 = chains[i].get_full_id()[1:]\n\t\tseq1 = get_sequence(chains[i])\n\t\t\n\t\tfor j in range(i+1,n):\n\t\t\tid2 = chains[j].get_full_id()[1:]\n\t\t\tseq2 = get_sequence(chains[j])\t\n\n\t\t\t#Align sequence\n\t\t\talns = pairwise2.align.globalxx(seq1, seq2)\n\t\t\tscore = alns[0][2]\n\n\t\t\t#Calculate sequence identity\n\t\t\tident_perc = score / max(len(seq1), len(seq2))\n\n\t\t\tif ident_perc > 0.99:\n\t\t\t\tbest_aln.append((id1,id2))\n\t\t\t\tif option:\n\t\t\t\t\tsys.stderr.write(\"\\t-Sequence of %s has aligned with %s with a identity of %d\\n\" %(id1, id2, ident_perc))\n\t\n\treturn best_aln", "def get_alignment_prob(self,alignments,sentence_alignments,t_prob):\n a_prob = {}\n #Calculate alignment probability\n for i in range(len(alignments)):\n a_list = alignments[i]\n total_prob_sum = 0\n for tuple in a_list:\n r = tuple[0] # english words across rows\n c = tuple[1] # foreign words across columns\n total_prob_sum += t_prob[r][c]\n a_prob[i] = total_prob_sum\n ## Nomalise alignment probability\n for s_id in sentence_alignments.keys():\n alignment_sum = 0\n align_ids = sentence_alignments[s_id]\n for a_id in align_ids:\n alignment_sum += a_prob[a_id] # Calculate nomalzatio nfactor\n for a_id in align_ids:\n a_prob[a_id] /= alignment_sum # normalize\n\n return a_prob", "def identifyMindist(align, ignoreGaps=True):\n align = padAlignment(align)\n cons = consensus(align, ignoreGaps)\n dist = align.map(partial(hamming_distance, cons))\n return align[dist.argmin()]", "def compute_backpointers(s0, s1): #Tillverkar en array med backpointrs\r\n if s0 == None or s1 == None:\r\n raise Exception('Both s0 and s1 have to be set')\r\n rows = len(s0)+1 # antalet rader\r\n columns = len(s1)+1 # antalet kolumner\r\n\r\n ####### Tillverkar Levenshtein matrisen ########\r\n # Gör en tom matris med nollor\r\n distance = [[0 for y in range(len(s1)+1)] for x in range(len(s0)+1)]\r\n\r\n # Gör de yttre lagrerna i matrisen 0 -> len(str) vertikalt och horisontellt\r\n for i in range(1,rows):\r\n distance[i][0] = i\r\n for i in range(1,columns):\r\n distance[0][i] = i\r\n\r\n # Beräknar kostnaderna för varje plats inne i matrisen och sätter in dem\r\n # kollar om bokstaven på indexet i de två orden är samma i sådana fall kostar det 0\r\n # och skall ha samma värde som diagonalt innan, annars kostar det 1 från över eller underself.\r\n for column in range(1,columns):\r\n for row in range(1,rows): # kolla varje rad i vare column\r\n if s0[row-1] == s1[column -1]: # om det är samma bokstav kostar det 0\r\n c = 0\r\n else: # annars kostar det 2\r\n c = 2\r\n distance[row][column] = min(distance[row-1][column] + 1,distance[row][column-1] + 1,distance[row-1][column-1] + c)\r\n # raden över säger att det minsta värdet av över eller bredvid + 1 eller diagonalt innan plus (0 eller 2)\r\n # skall sättas in på platsen i matrisen.\r\n\r\n # det minsta avståndet är\r\n cost = distance[row][column]\r\n print(\"totalkostnaden är\")\r\n 
print(cost)\r\n\r\n\r\n ####### Tillverkar backptr-matrisen ########\r\n # Tillverkar en tom matris med [0,0] för till backptr-matrisen\r\n backptr = [[[0, 0] for y in range(len(s1)+1)] for x in range(len(s0)+1)]\r\n\r\n # går igenom platserna i Levenshtein matrisen bakirfrån\r\n for column in range(columns-1,0,-1):\r\n for row in range(rows-1,0,-1):\r\n # Om värdet till vänster är det minsta: peka vänster\r\n if distance[row][column-1] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row\r\n backptr[row][column][1] = column -1\r\n # Om värdet över är det minsta: peka upp\r\n if distance[row-1][column] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row -1\r\n backptr[row][column][1] = column\r\n # om värdet diagonalt är minst: peka på diagonalt\r\n if distance[row-1][column-1] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row-1\r\n backptr[row][column][1] = column -1\r\n\r\n # Gör yttervärdena i matrisen, (OBS behövs ej)\r\n for i in range(0,rows):\r\n j = i-1\r\n backptr[i][0][0] = j\r\n backptr[i][0][1] = 0\r\n for i in range(0,columns):\r\n j = i-1\r\n backptr[0][i][1] = j\r\n backptr[0][i][0] = 0\r\n\r\n return backptr", "def get_affine():\n root_dir = \"/home/sdb/wangshentao/myspace/thesis/data/VisDrone2019-MOT-test-dev/\"\n seq_dir = root_dir + \"sequences/\"\n affine_dir = root_dir + \"affine/\"\n if not os.path.exists(affine_dir):\n os.makedirs(affine_dir)\n MIN_MATCH_COUNT = 10\n # 1088 is more accurate\n for seq in os.listdir(seq_dir):\n print(seq)\n seq_files = os.listdir(os.path.join(seq_dir, seq))\n seq_files = sorted(seq_files, key=lambda x: int(x[:-4]))\n affine_dict = {}\n for i in range(len(seq_files)-1):\n print(i)\n image0 = cv2.imread(os.path.join(seq_dir, seq, seq_files[i]))\n image1 = cv2.imread(os.path.join(seq_dir, seq, seq_files[i+1]))\n image0, _, _, _ = letterbox(image0)\n image1, _, _, _ = letterbox(image1)\n image0 = cv2.cvtColor(image0, cv2.COLOR_BGR2GRAY)\n image1 = cv2.cvtColor(image1, cv2.COLOR_BGR2GRAY)\n surf = cv2.xfeatures2d.SURF_create()\n kp0, des0 = surf.detectAndCompute(image0, None)\n kp1, des1 = surf.detectAndCompute(image1, None)\n FLANN_INDEX_KDTREE = 0\n index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)\n search_params = dict(checks=50)\n\n flann = cv2.FlannBasedMatcher(index_params, search_params)\n matchs = flann.knnMatch(des0, des1, k=2)\n\n # store all the good matchs as per Lowe's ratio test\n good = []\n for m, n in matchs:\n if m.distance < 0.7 * n.distance:\n good.append(m)\n if len(good) > MIN_MATCH_COUNT:\n src_pts = np.float32([kp0[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)\n dst_pts = np.float32([kp1[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)\n M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)\n else:\n M = np.eye(3, 3)\n affine_dict[seq_files[i]] = M\n with open(os.path.join(seq_dir, affine_dir, seq+'.pickle'), 'wb') as fout:\n pickle.dump(affine_dict, fout)", "def main(result_directory, d_best, min_precision, targeted_files,\n null_distance):\n\n # paths\n path_log = os.path.join(result_directory, \"log_final_reduced\")\n\n # get data\n df_log = pd.read_csv(path_log, header=0, encoding=\"utf-8\", sep=\";\",\n index_col=False)\n path_tfidf = os.path.join(result_directory, \"tfidf.npz\")\n tfidf = load_sparse_csr(path_tfidf)\n print(\"df_log shape :\", df_log.shape)\n print(\"tfidf shape :\", tfidf.shape, 
\"\\n\")\n\n # compute topic space\n w, auc, precision, recall, threshold = compute_best_topic_space(\n result_directory, tfidf, df_log, d_best)\n print(\"--------------------------------------------\")\n print(\"w shape :\", w.shape)\n print(\"best auc :\", auc, \"(%i topics)\" % d_best[\"n_topics\"], \"\\n\")\n\n # fit the neighborhood\n radius, i_radius = find_radius(precision, recall, threshold, min_precision)\n neigh = define_neighbors(result_directory, w, radius, d_best[\"norm\"])\n print(\"recommendation radius :\", radius, \"\\n\")\n\n # plot precision recall curve\n graph_precision_recall(auc, recall, precision, i_radius)\n\n # find all neighborhoods\n # all_neighbors(result_directory, df_log, w, neigh)\n\n # get a 2D plan from the topic space\n (w_reduced_2d, variance_explained_2d, w_reduced_3d,\n variance_explained_3d) = dimensionality_reduction(result_directory, w)\n\n # plot reduced topic space\n plot_topic_space_reduced(result_directory, w_reduced_3d,\n variance_explained_3d)\n\n # reset new directories for neighbors plots\n create_specific_neighbors_directory(result_directory)\n\n # get information for a specific file\n for i_target in targeted_files:\n indices, distances = describe_one_file(result_directory, df_log, w,\n neigh, i_target, null_distance)\n\n # plot neighbors 3D\n graph_neighbors_3d_local(result_directory, df_log, w, indices,\n i_target, radius, str(i_target))\n\n return", "def gridalign(self):\n self.position.x = int(round(self.position.x))\n self.position.y = int(round(self.position.y))\n self.position.z = int(round(self.position.z))\n\n if self.fan:\n self.fan = (int(round(self.fan[0])),int(round(self.fan[1])),int(round(self.fan[2])))\n\n bestDist = 2*9\n bestMatrix = makeMatrix(0,0,0)\n\n for compass in [0, 90, 180, 270]:\n for pitch in [0, 90, 180, 270]:\n for roll in [0, 90, 180, 270]:\n m = makeMatrix(compass,pitch,roll)\n dist = matrixDistanceSquared(self.matrix, m)\n if dist < bestDist:\n bestMatrix = m\n bestDist = dist\n\n self.matrix = bestMatrix\n self.positionOut()\n self.directionOut()", "def get_corpus_alignments(self,bitext,f_vocab,e_vocab):\n alignments = [] # all alignments in the corpus\n sentence_alignments = {} ## associated alignments for each snetence pair\n sent_count = 0\n for pair in bitext:\n sentence_alignments[sent_count] = []\n f_sent = pair[\"fr\"]\n e_sent = pair[\"en\"]\n e_count = len(e_sent) # number of wrods in each sentence\n f_count = len(f_sent)\n ## generate all combinations of alignments\n tuple_sets = []\n # all possible e->f mappings for each english word in separate list\n for i in range(e_count): # getting english words count of sets of ali tuples\n list = []\n iv_idx = e_vocab.index(e_sent[i]) ## getting corresponding index of word in the the vocabulary list\n for j in range(f_count):\n jv_idx = f_vocab.index(f_sent[j])\n list.append((iv_idx,jv_idx)) #of form (e,f)\n tuple_sets.append(list)\n for combination in product(*tuple_sets): ## change thos for more than 3 words\n alignments.append(combination)\n sentence_alignments[sent_count].append(len(alignments)-1)\n sent_count += 1\n #print(alignments)\n return alignments,sentence_alignments", "def report_unknown_indel_results(allele1, allele2, \\\n five_prime_diff, five_prime_variant_len, three_prime_diff, three_prime_variant_len):\n \n if five_prime_diff == 'error - cannot compare' or three_prime_diff == 'error - cannot compare':\n relative_sequence_alignment = '{} and {} cannot be compared using the MSF file'.format(allele1, allele2)\n return 
relative_sequence_alignment\n \n if five_prime_diff == 'deletion':\n if three_prime_diff == 'deletion':\n relative_sequence_alignment = \"{} is a superset of {}; 5' end {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len)\n\n elif three_prime_diff == 'insertion':\n relative_sequence_alignment = \"{} has a staggered 3' end overlap with {}; 5' end {}bp overhang; 3' {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n \n elif three_prime_diff == 'no diff':\n relative_sequence_alignment = \"{} is a superset of {}; 5' end {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n \n elif five_prime_diff == 'insertion':\n if three_prime_diff == 'insertion':\n relative_sequence_alignment = \"{} is a subset of {}; 5' end {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n \n elif three_prime_diff == 'deletion':\n relative_sequence_alignment = \"{} has a staggered 5' end overlap with {}; 5' {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n \n elif three_prime_diff == 'no diff':\n relative_sequence_alignment = \"{} is a subset of {}; 5' end {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n \n \n elif five_prime_diff == 'no diff':\n if three_prime_diff == 'deletion':\n relative_sequence_alignment = \"{} is a superset of {}; 5' end {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n \n elif three_prime_diff == 'insertion':\n relative_sequence_alignment = \"{} is a subset of {}; 5' end {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n \n elif three_prime_diff == 'no diff':\n relative_sequence_alignment = \"{} is aligned with {}; 5' end {}bp overhang; 3' end {}bp overhang\".\\\n format(allele1, allele2, five_prime_variant_len, three_prime_variant_len) \n\n\n \n return relative_sequence_alignment", "def formatted_alignments(self,chosen_a_idxs,bitext,alignments,e_words,f_words):\n output =[]\n output_idxs = []\n for key in chosen_a_idxs.keys():\n temp = []\n temp_idx = []\n idx = chosen_a_idxs[key]\n alignment = alignments[idx]\n for t in alignment:\n temp.append((e_words[t[0]],f_words[t[1]]))\n temp_idx.append((bitext[key][\"en\"].index(e_words[t[0]]),bitext[key][\"fr\"].index(f_words[t[1]])))\n output.append(temp)\n output_idxs.append(temp_idx)\n return output,output_idxs", "def w_hungarian(prev_centroids,next_centroids,max_distance=50):\n xs0,ys0 = prev_centroids\n xs1,ys1 = next_centroids\n\n cost_matrix = fillCostMatrix(xs0,ys0,xs1,ys1)\n #make it disatvantageous to select any distance >max_distance\n cost_matrix[cost_matrix>(max_distance**2)]=np.max(cost_matrix)\n cost_matrix[cost_matrix==-1]=np.max(cost_matrix)\n xs,ys = linear_sum_assignment(cost_matrix)\n \n correspondance_list=[]\n for i in range(xs.size):\n correspondance_list.append( (xs[i],ys[i]) )\n apparition_list = [] \n elements_to_remove = [] \n for i, coords in enumerate(correspondance_list):\n if cost_matrix[coords]>max_distance**2:\n if coords[0]<len(xs0): #the left element exists\n correspondance_list[i] = (coords[0],-1)\n else: #the left element does not exists\n elements_to_remove.append(i)\n if coords[1]<len(xs1): #Add the 
right element only if it exists.\n apparition_list.append((-1,coords[1]))\n for j in range(len(elements_to_remove)):\n correspondance_list.pop(elements_to_remove[-(j+1)])\n correspondance_list.extend(apparition_list)\n return correspondance_list", "def compute_wer(self,pad_pred:torch.Tensor, pad_targets:torch.Tensor):\n ## Get HYP and REF Sentences using pad_pred and pad_targets\n target_tokens = [y[y!= self.ignore_label] for y in pad_targets]\n pred_lens = [len(y[y!= self.ignore_label]) for y in pad_targets]\n pred_pad = np.argmax(F.log_softmax(pad_pred,dim=-1).detach().cpu().numpy(),axis=-1) \n pred_tokens = [y[:pred_lens[i]] for i,y in enumerate(pred_pad)]\n self.pred_tokens = pred_tokens\n ref_lens = []\n word_eds = []\n for tgt,pred in zip(target_tokens,pred_tokens):\n self.ref = \"\".join([self.char_list[x] for x in tgt if x != -1]).replace(\"<space>\",\" \").replace(\"<eos>\",\"\")\n self.hyp = \"\".join([self.char_list[x] for x in pred]).replace(\"<space>\",\" \").replace(\"<eos>\",\"\")\n word_eds.append(editdistance.eval(self.ref.split(' '),self.hyp.split(' ')))\n ref_lens.append(len(self.ref.split(' ')))\n return float(sum(word_eds))/sum(ref_lens)", "def solve(self):\n self.a_tree = self.make_tree(self.a_strings)\n self.b_tree = self.make_tree(self.b_strings)\n\n # apply prefix filter\n self.s_options_for_beginning = self.prefix_filter()\n if len(self.s_options_for_beginning) == 0:\n self.final_sequence = 'IMPOSSIBLE'\n return\n\n # apply postfix filter\n self.s_options_for_ending = self.postfix_filter()\n if len(self.s_options_for_ending) == 0:\n self.final_sequence = 'IMPOSSIBLE'\n return\n\n # apply length balance filter (on the result after prefix and postfix filter)\n self.s_combinations_by_length = self.length_balance_filter(self.s_options_for_beginning,\n self.s_options_for_ending)\n if len(self.s_combinations_by_length) == 0:\n self.final_sequence = 'IMPOSSIBLE'\n return\n\n # apple elements balance filter (on the result after length balance filter)\n self.s_combinations_by_elements = self.elements_balance_filter(self.s_combinations_by_length)\n if len(self.s_combinations_by_elements) == 0:\n self.final_sequence = 'IMPOSSIBLE'\n return\n\n # sort filtered combinations by its elements length in ascending order\n self.s_combinations_filtered = sorted(self.s_combinations_by_elements.items())\n\n # for each combinations length try to find the lexicographically shortest sequence using depth-first search\n for length, combinations in self.s_combinations_filtered:\n for combination in combinations:\n self.combination = combination\n self.dfs()\n\n # return sequence as soon as we got one\n if self.final_sequence:\n return\n\n # otherwise - return 'IMPOSSIBLE'\n if not self.final_sequence:\n self.final_sequence = 'IMPOSSIBLE'\n return", "def minimal_align(self):\n desired = int(PlatformVar(\"align\"))\n for ii in range(len(self.__content)):\n line = self.__content[ii]\n match = re.match(r'.*\\.align\\s+(\\d+).*', line)\n if match:\n align = int(match.group(1))\n # Due to GNU AS compatibility modes, .align may mean different things.\n if osarch_is_amd64 or osarch_is_ia32():\n if desired != align:\n if is_verbose():\n print(\"Replacing %i-byte alignment with %i-byte alignment.\" % (align, desired))\n self.__content[ii] = \" .balign %i\\n\" % (desired)\n else:\n print(\"Replacing low-order bit alignment %i with %i-byte alignment.\" % (align, desired))\n self.__content[ii] = \" .balign %i\\n\" % (desired)", "def stereo_score(alignment):\n #dictionary with properties for each 
residue\n dic_prop = {'I': [1, 0, 0, 0, 0, 1, 0, 0, 0, 0],\n 'L': [1, 0, 0, 0, 0, 1, 0, 0, 0, 0],\n 'V': [1, 0, 1, 0, 0, 1, 0, 0, 0, 0],\n 'C': [1, 0, 1, 0, 0, 0, 0, 0, 0, 0],\n 'A': [1, 0, 1, 0, 1, 0, 0, 0, 0, 0],\n 'G': [1, 0, 1, 0, 1, 0, 0, 0, 0, 0],\n 'M': [1, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n 'F': [1, 0, 0, 0, 0, 0, 1, 0, 0, 0],\n 'Y': [1, 1, 0, 0, 0, 0, 1, 0, 0, 0],\n 'W': [1, 1, 0, 0, 0, 0, 1, 0, 0, 0],\n 'H': [1, 1, 0, 0, 0, 0, 1, 1, 0, 1],\n 'K': [1, 1, 0, 0, 0, 0, 0, 1, 0, 1],\n 'R': [0, 1, 0, 0, 0, 0, 0, 1, 0, 1],\n 'E': [0, 1, 0, 0, 0, 0, 0, 0, 1, 1],\n 'Q': [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],\n 'D': [0, 1, 1, 0, 0, 0, 0, 0, 1, 1],\n 'N': [0, 1, 1, 0, 0, 0, 0, 0, 0, 0],\n 'S': [0, 1, 1, 0, 1, 0, 0, 0, 0, 0],\n 'T': [1, 1, 1, 0, 0, 0, 0, 0, 0, 0],\n 'P': [0, 0, 1, 1, 0, 0, 0, 0, 0, 0],\n 'B': [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],\n 'Z': [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],\n 'X': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],\n '-': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]}\n score_list = []\n for i in range(0, alignment.get_alignment_length()):\n #extract the unique residues in the alignment\n column = ''.join(set(alignment[:, i]))\n stereo_list = []\n #loop through each residue\n for res in range(0, len(column)):\n #replace the residue with list of properties\n residue = column[res]\n #append the properties list to a\n stereo_prop = dic_prop.get(residue)\n stereo_list.append(stereo_prop)\n #number of common properties\n count_stereo = sum(len(set(i)) == 1 for i in zip(*stereo_list))\n #add the number of properties to a list\n score_list.append(count_stereo)\n score_list_final = [float(i*0.1) for i in score_list]\n return score_list_final", "def align_preprocessed(self, img):\n aligner = FaceAligner(self.args.wing_path, self.args.lm_path, self.args.img_size)\n return aligner.align(img)", "def _self_align(self):\n logging.info(\"Splitting palindrome.\")\n logging.debug(\"Making reverse complement sequences of reads in \" +\n \"{i} to {o}\".format(i=self.ori_all_reads_fasta,\n o=self.rc_all_reads_fasta))\n num_reads = revcmp_fasta(self.ori_all_reads_fasta,\n self.rc_all_reads_fasta)\n\n reads_per_split = max(1, int(num_reads/self.nproc) + 1)\n logging.debug(\"Splitting {f} to small files each containing {n} reads.\".\n format(f=self.ori_all_reads_fasta, n=reads_per_split))\n fs = FastaSplitter(input_fasta=self.ori_all_reads_fasta,\n reads_per_split=reads_per_split,\n out_dir=self.out_dir,\n out_prefix=\"reads.split.\")\n fs.split()\n sp_fasta_files = fs.out_fns\n\n logging.debug(\"Splitting {f} to smaller files.\".\n format(f=self.rc_all_reads_fasta))\n rc_fs = FastaSplitter(input_fasta=self.rc_all_reads_fasta,\n reads_per_split=reads_per_split,\n out_dir=self.out_dir,\n out_prefix=\"rc_reads.split.\")\n rc_fs.split()\n rc_sp_fasta_files = rc_fs.out_fns\n\n logging.debug(\"Aligning each read in {i} to its revese compelement \" +\n \"read using sdpMatcher.\".format(i=self.ori_all_reads_fasta))\n\n sdps = [\"{f}.sdp\".format(f=f) for f in sp_fasta_files]\n jobs = []\n for f, rc_f, sdp in zip(sp_fasta_files, rc_sp_fasta_files, sdps):\n cmd = \"sdpMatcher {f} {rc_f} \".format(f=f, rc_f=rc_f) + \\\n \"10 -local > {sdp} \".format(sdp=sdp)\n logging.debug(\"CMD: {cmd}\".format(cmd=cmd))\n jobs.append(cmd)\n\n pool = Pool(processes=self.nproc)\n rets = pool.map(backticks, jobs)\n pool.close()\n pool.join()\n\n for i, job in enumerate(jobs):\n if rets[i][1] != 0:\n errMsg = \"Job {j} failed.\".format(j=job) + str(rets[i][2])\n raise RuntimeError(errMsg)\n\n logging.debug(\"Concatenating all sdp outputs to {f}\".\n 
format(f=self.sdp_out_file))\n cat_files(src=sdps, dst=self.sdp_out_file)\n\n logging.debug(\"Cleaning intermediate fasta & sdp files.\")\n fs.rmOutFNs()\n rc_fs.rmOutFNs()\n\n for f in sdps:\n os.remove(f)", "def word_analogy(self):\n data = open(\"data/word_analogy_subset.en.ar.txt\").read().split('\\n')\n data = [x for x in data if len(x.split()) == 4]\n cnt = 0\n keys = list(self.embeddings_index.keys())\n vectors = np.array(list(self.embeddings_index.values()))\n norms = np.linalg.norm(vectors, axis=1)\n for i in data:\n i = self.preprocessor(i).split()\n try:\n v = self.embeddings_index[i[0]] - self.embeddings_index[i[1]] + self.embeddings_index[i[2]]\n except:\n continue\n unit = v / np.linalg.norm(v)\n dists = np.dot(vectors, unit) / norms\n best = np.argpartition(-dists, 10)[:10 + 1]\n best = best.take(np.argsort((-dists).take(best)))\n result = [(keys[sim], float(dists[sim]))\n for sim in best]\n sbv = result[:10]\n for j in sbv:\n if j[0] == i[3]:\n cnt += 1\n return cnt/ len(data)", "def get_alignment_params(self, s, w):\n\n X1 = s.__get_X(w)\n X2 = self.__get_X(w)\n Y1 = s.__get_Y(w)\n Y2 = self.__get_Y(w)\n Z = self.__get_Z(w)\n W = sum(w)\n C1 = self.__get_C1(w, s)\n C2 = self.__get_C2(w, s)\n\n a = np.array([[ X2, -Y2, W, 0],\n [ Y2, X2, 0, W],\n [ Z, 0, X2, Y2],\n [ 0, Z, -Y2, X2]])\n\n b = np.array([X1, Y1, C1, C2])\n # Solve equations\n # result is [ax, ay, tx, ty]\n return np.linalg.solve(a, b)", "def _PD_hamming(alignA, alignB, subst, bySite, withinA, ignoreGaps=True):\n L = len(alignA.iloc[0])\n gapCode = AA2CODE['-']\n\n \"\"\"Convert alignments into integer arrays first to speed comparisons\"\"\"\n matA = np.zeros((len(alignA), L))\n for seqi, s in enumerate(alignA):\n matA[seqi,:] = _seq2vec(s)\n if not withinA:\n matB = np.zeros((len(alignB), L))\n for seqi, s in enumerate(alignB):\n matB[seqi,:] = _seq2vec(s)\n\n \"\"\"Dist will be 1 where equal, 0 where not and nan if one is a gap\"\"\"\n if withinA:\n dist=np.zeros((int(scipy.special.comb(len(alignA), 2)), L))\n allPairs = itertools.combinations(np.arange(len(alignA)), 2)\n for j, (seqi1, seqi2) in enumerate(allPairs):\n dist[j,:] = matA[seqi1,:]!=matA[seqi2,:]\n if ignoreGaps:\n gapInd = (matA[seqi1,:]==gapCode) | (matA[seqi2,:]==gapCode)\n dist[j, gapInd] = np.nan\n else:\n dist=np.zeros((len(alignA)*len(alignB), L))\n allPairs = itertools.product(np.arange(len(alignA)), np.arange(len(alignB)))\n for j, (seqiA, seqiB) in enumerate(allPairs):\n dist[j,:] = matA[seqiA,:]!=matB[seqiB,:]\n if ignoreGaps:\n gapInd = (matA[seqiA,:]==gapCode) | (matB[seqiB,:]==gapCode)\n dist[j, gapInd] = np.nan\n\n if not bySite:\n dist=np.nanmean(dist, axis=1)\n return np.nanmean(dist, axis=0)", "def __init__(self, targetString, w, k, t):\n \n self.targetString = targetString\n self.w = w\n self.k = k\n self.t = t # If a minmer occurs more than t times then its entry is removed from the index\n # This is a heuristic to remove repetitive minmers that would create many spurious alignments between\n # repeats\n \n # Hash of minmers to query locations, stored as a map whose keys\n # are minmers and whose values are lists of the start indexes of\n # occurrences of the corresponding minmer in the targetString, \n # sorted in ascending order of index in the targetString.\n #\n # For example if k = 2 and w = 4 and targetString = \"GATTACATTT\"\n #\n # GATTACATTT\n # GATT (AT)\n # ATTA (AT)\n # TTAC (AC)\n # TACA (AC)\n # ACAT (AC)\n # CATT (AT)\n # ATTT (AT)\n #\n # then self.minimizerMap = { \"AT\":(1,6), \"AC\":(4,) }\n 
self.minimizerMap = {}\n # Code to complete to build index - you are free to define additional functions\n self.size = len(targetString)\n for nuc in range(self.size - self.w + 1):\n window = self.targetString[nuc: nuc + self.w]\n minimer = None # some holder for the minimizer\n\n # implement the window alogrithm from the reading.\n for k_position in range(self.w - k + 1):\n kmer = window[k_position: k_position + k]\n if minimer is None or kmer < minimer[0]: \n minimer = (kmer, nuc + k_position)\n self.minimizerMap.setdefault(minimer[0], set()).add(minimer[1])\n if len(self.minimizerMap[minimer[0]]) > t: del self.minimizerMap[minimer[0]]" ]
[ "0.6580737", "0.6504036", "0.64750624", "0.6450722", "0.6416593", "0.63871074", "0.6386579", "0.6293225", "0.61991656", "0.6193383", "0.6179944", "0.6168121", "0.61549205", "0.6107183", "0.60953724", "0.60164833", "0.59894025", "0.5960489", "0.5937504", "0.5936695", "0.5934356", "0.5892217", "0.58837473", "0.5857752", "0.5850987", "0.57983416", "0.578789", "0.5780039", "0.5757632", "0.5752239", "0.5749516", "0.5742853", "0.57305527", "0.57165307", "0.570925", "0.5679468", "0.5674312", "0.56637484", "0.5662352", "0.563172", "0.5625491", "0.5584149", "0.55766374", "0.55583626", "0.5539079", "0.5526412", "0.55163884", "0.5484157", "0.5479933", "0.54721785", "0.5471859", "0.54679614", "0.5437548", "0.5433647", "0.5424928", "0.53967184", "0.5384861", "0.5378258", "0.5366095", "0.5360041", "0.5358326", "0.5356993", "0.5350372", "0.53486407", "0.53354216", "0.5333716", "0.53314877", "0.53194815", "0.53170586", "0.5312927", "0.5296133", "0.52927256", "0.52916473", "0.52904904", "0.5285716", "0.5278425", "0.5276947", "0.52704626", "0.5258296", "0.5251406", "0.52458256", "0.5237113", "0.52357775", "0.52350175", "0.52265555", "0.5224814", "0.52237535", "0.5219106", "0.5207324", "0.52025187", "0.5195235", "0.5194675", "0.5192898", "0.5191867", "0.51902544", "0.51883", "0.5186346", "0.5185138", "0.5179142", "0.51682687", "0.5163687" ]
0.0
-1
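Reading note: the unlabeled trailing values of each row (the score list above, then `0.0`, then `-1`) appear to be the per-negative similarity scores, the positive document's score, and its rank; `-1` is read here as the positive document not appearing among the scored candidates. A minimal post-processing sketch under those assumptions — the parsed-row layout and field names are inferred from this dump, not from any particular loader API:

```python
import numpy as np

# Hypothetical parsed row; field names are inferred from the dump's schema,
# values are placeholders rather than the actual row above.
row = {
    "negatives": ["doc_a", "doc_b", "doc_c"],
    "negative_scores": [0.61, 0.74, 0.58],
    "document_score": 0.0,
    "document_rank": -1,  # read as: positive not among scored candidates
}

# Order the hard negatives by model score, highest first.
order = np.argsort(row["negative_scores"])[::-1]
ranked = [(row["negatives"][i], row["negative_scores"][i]) for i in order]

positive_retrieved = row["document_rank"] >= 0
print(ranked, positive_retrieved)
```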
Fast translation, rotation & scale in 2D using np.einsum when the input is an array of points rather than a single point
def fast_TRS_2d(input, transform_matrix, input_is_point=False): if input_is_point: return np.delete(np.dot(transform_matrix, np.insert(input, 2, 1)), 2) else: return np.delete(np.einsum('jk,ik->ij', transform_matrix, np.insert(input, 2, 1, axis=1)), 2, 1)
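For context on the row above: the document lifts 2D coordinates to homogeneous form with `np.insert(..., 2, 1, ...)`, applies the 3x3 transform — `np.dot` for a single point, `np.einsum('jk,ik->ij', ...)` for a whole (N, 2) array at once — and then strips the homogeneous column again with `np.delete(..., 2, ...)`. A minimal usage sketch; the `make_trs_2d` helper below is a hypothetical illustration for building the matrix and is not part of the dataset row:

```python
import numpy as np

# Hypothetical helper: compose a 3x3 homogeneous transform from a
# translation (tx, ty), rotation angle in radians, and uniform scale.
def make_trs_2d(tx, ty, theta, s):
    c, si = np.cos(theta), np.sin(theta)
    return np.array([[s * c, -s * si, tx],
                     [s * si,  s * c,  ty],
                     [0.0,    0.0,     1.0]])

M = make_trs_2d(1.0, -2.0, np.pi / 4, 2.0)
pts = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])  # (N, 2) batch

# Batch path: append the homogeneous coordinate, transform every row, drop it.
hom = np.insert(pts, 2, 1, axis=1)                           # (N, 3)
batch_out = np.delete(np.einsum('jk,ik->ij', M, hom), 2, 1)  # (N, 2)

# Single-point path, for comparison with the batch result.
single_out = np.delete(np.dot(M, np.insert(pts[0], 2, 1)), 2)
assert np.allclose(batch_out[0], single_out)
```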
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transAffine2D( iScale=(1, 1), iTrans=(0, 0), iRot=0, iShear=(0, 0) ): \n iRot = iRot * np.pi / 180\n oMatScale = np.matrix( ((iScale[0],0,0),(0,iScale[1],0),(0,0,1)) )\n oMatTrans = np.matrix( ((1,0,iTrans[0]),(0,1,iTrans[1]),(0,0,1)) )\n oMatRot = np.matrix( ((np.cos(iRot),-np.sin(iRot),0),\\\n (np.sin(iRot),np.cos(iRot),0),(0,0,1)) )\n oMatShear = np.matrix( ((1,iShear[0],0),(iShear[1],1,0),(0,0,1)) )\n # ustvari izhodno matriko\n oMat2D = oMatTrans * oMatShear * oMatRot * oMatScale\n return oMat2D", "def affine_transform(trans_mat, p0):\r\n n_data, n_dim = np.shape(p0)\r\n p0 = np.hstack((p0, np.ones((n_data, 1))))\r\n #return np.transpose(np.dot(np.transpose(trans_mat), np.transpose(p0)))\r\n return np.dot(p0, trans_mat)", "def apply_direction_scale( vectors, direction, scale ):\n \"\"\"\n scaling is defined as:\n \n [p'][1 + (k - 1)n.x^2, (k - 1)n.x n.y^2, (k - 1)n.x n.z ]\n S(n,k) = [q'][(k - 1)n.x n.y, 1 + (k - 1)n.y, (k - 1)n.y n.z ]\n [r'][(k - 1)n.x n.z, (k - 1)n.y n.z, 1 + (k - 1)n.z^2 ]\n \n where:\n v' is the resulting vector after scaling\n v is the vector to scale\n n is the direction of the scaling\n n.x is the x component of n\n n.y is the y component of n\n n.z is the z component of n\n k is the scaling factor\n \"\"\"\n scaleMinus1 = scale - 1\n matrix = numpy.array(\n [\n # m1\n [\n # m11 = 1 + (k - 1)n.x^2\n 1 + scaleMinus1 * (direction[ 0 ]**2),\n # m12 = (k - 1)n.x n.y^2\n scaleMinus1 * direction[ 0 ] * direction[ 1 ]**2,\n # m13 = (k - 1)n.x n.z\n scaleMinus1 * direction[ 0 ] * direction[ 2 ]\n ],\n # m2\n [\n # m21 = (k - 1)n.x n.y\n scaleMinus1 * direction[ 0 ] * direction[ 1 ],\n # m22 = 1 + (k - 1)n.y\n 1 + scaleMinus1 * direction[ 1 ],\n # m23 = (k - 1)n.y n.z\n scaleMinus1 * direction[ 1 ] * direction[ 2 ]\n ],\n # m3\n [\n # m31 = (k - 1)n.x n.z\n scaleMinus1 * direction[ 0 ] * direction[ 2 ],\n # m32 = (k - 1)n.y n.z\n scaleMinus1 * direction[ 1 ] * direction[ 2 ],\n # m33 = 1 + (k - 1)n.z^2\n 1 + scaleMinus1 * direction[ 2 ]**2\n ]\n ],\n dtype = numpy.float\n )\n \n return numpy.dot( vectors, matrix )", "def estimate_affine_matrix_3d_to_2d(X, x):\n assert x.shape[0] == X.shape[0]\n assert x.shape[0] >= 4\n X = X.T # (3, n)\n x = x.T # (2, n)\n n = x.shape[1]\n\n ###---- 1. normalization\n ## 2d points\n mean = np.mean(x, 1) # (2, )\n x = x - np.tile(mean[:, np.newaxis], [1, n]) # (2, n)\n average_norm = np.mean(np.sqrt(np.sum(x ** 2, 0)))\n scale = np.sqrt(2) / average_norm\n x = scale * x\n\n # T = [[scale, 0, -mean * scale], \n # [ 0, scale, -mean * scale], \n # [ 0, 0, 1 ]]\n T = np.zeros((3, 3), dtype=np.float32)\n T[0, 0] = T[1, 1] = scale\n T[:2, 2] = -mean * scale\n T[2, 2] = 1\n\n ## 3d points\n X_homo = np.vstack((X, np.ones((1, n)))) # (4, n)\n mean = np.mean(X, 1) # (3, )\n X = X - np.tile(mean[:, np.newaxis], [1, n]) # (3, n)\n m = X_homo[: 3, :] - X\n average_norm = np.mean(np.sqrt(np.sum(X ** 2, 0)))\n scale = np.sqrt(3) / average_norm\n X = scale * X\n\n U = np.zeros((4, 4), dtype=np.float32)\n U[0, 0] = U[1, 1] = U[2, 2] = scale\n U[: 3, 3] = -mean * scale\n U[3, 3] = 1\n\n ###---- 2. equations\n A = np.zeros((n * 2, 8), dtype=np.float32)\n X_homo = np.vstack((X, np.ones((1, n)))).T\n A[: n, : 4] = X_homo\n A[n: , 4: ] = X_homo\n b = np.reshape(x, [-1, 1]) # (2n, 1)\n\n ###---- 3.solution\n p_8 = np.linalg.pinv(A).dot(b) # (8, 2n) x (2n, 1) -> (8, 1)\n p = np.zeros((3, 4), dtype=np.float32)\n p[0, :] = p_8[:4, 0]\n p[1, :] = p_8[4:, 0]\n p[-1, -1] = 1\n\n ###---- 4. 
denormalization\n P_Affine = np.linalg.inv(T).dot(p.dot(U))\n return P_Affine", "def __compose_transformation(self):\n s = self.scale\n rotR = self.rotation\n t = self.translation\n T = np.eye(4)\n T[0:3, 3] = t\n R = np.eye(4)\n R[0:3, 0:3] = rotR\n M = T.dot(R)\n if s == 1:\n M = T.dot(R)\n else:\n S = np.eye(4)\n S[0:3, 0:3] = np.diag([s, s, s])\n M = T.dot(R).dot(S)\n return M", "def get_affine_matrix2d(\n translations: torch.Tensor,\n center: torch.Tensor,\n scale: torch.Tensor,\n angle: torch.Tensor,\n sx: Optional[torch.Tensor] = None,\n sy: Optional[torch.Tensor] = None,\n) -> torch.Tensor:\n transform: torch.Tensor = get_rotation_matrix2d(center, -angle, scale)\n transform[..., 2] += translations # tx/ty\n\n # pad transform to get Bx3x3\n transform_h = convert_affinematrix_to_homography(transform)\n\n if any(s is not None for s in [sx, sy]):\n shear_mat = get_shear_matrix2d(center, sx, sy)\n transform_h = transform_h @ shear_mat\n\n return transform_h", "def AffineTransform( from_pts, to_pts ):\n \n # check that there are match points\n if len(from_pts) != len(to_pts) or len(to_pts)<1:\n print \"from_pts and to_pts must be of same size.\"\n return False\n\n # check the dimensions\n dim = len(from_pts[0]) # num of dimensions\n if len(from_pts) < dim:\n print \"Too few points => under-determined system.\"\n return False\n elif len(from_pts) > dim + 1:\n print \"Too many points => over-determined system.\"\n return False\n\n \n #segregate the x and y coordinages\n from_pts_x, from_pts_y = zip(*from_pts)\n to_pts_x, to_pts_y = zip(*to_pts)\n \n #create the Matricies for processing\n I = np.matrix([from_pts_x, from_pts_y, [1,1,1]])\n P = np.matrix([to_pts_x, to_pts_y])\n \n #Calculate the 2D affine transform matrix (A)\n A = P * linalg.pinv(I) \n\n # Make a result object\n class Transformation:\n \"\"\"Result object that represents the transformation\n from affine fitter.\"\"\"\n\n def To_Str(self):\n res = \"\"\n for j in range(dim):\n str1 = \"x%d' = \" % j\n for i in range(dim):\n str1 +=\"x%d * %f + \" % (i, A[i][j+dim+1])\n str1 += \"%f\" % A[dim][j+dim+1]\n res += str1 + \"\\n\"\n return res\n\n def Transform(self, pt_x, pt_y):\n pt_vector = np.matrix([[pt_x], [pt_y], [1]])\n transformed_pt = A * pt_vector\n return map(itemgetter(0), transformed_pt.tolist())\n return Transformation()", "def get_affine_matrix2d(\n translations: Tensor,\n center: Tensor,\n scale: Tensor,\n angle: Tensor,\n sx: Tensor | None = None,\n sy: Tensor | None = None,\n) -> Tensor:\n transform: Tensor = get_rotation_matrix2d(center, -angle, scale)\n transform[..., 2] += translations # tx/ty\n\n # pad transform to get Bx3x3\n transform_h = convert_affinematrix_to_homography(transform)\n\n if any(s is not None for s in [sx, sy]):\n shear_mat = get_shear_matrix2d(center, sx, sy)\n transform_h = transform_h @ shear_mat\n\n return transform_h", "def contract(tensor):\n temp = np.einsum('ikma, jlan', tensor, tensor)\n M = np.zeros((tensor.shape[0]**2, tensor.shape[1]**2, tensor.shape[2], tensor.shape[3]))\n for i,j,k,l,m,n in it.product(*[range(x) for x in temp.shape]):\n M[i + tensor.shape[0]*j, k + tensor.shape[1]*l, m, n] = temp[i,j,k,l,m,n]\n return M", "def transformAffine(self, coords):\n coordsshape = coords.shape\n dims = coordsshape[0] + 1\n coords = coords.reshape((len(coords), -1))\n coords = np.concatenate((coords, np.ones((1, len(coords[0])))), 0)\n affine = np.eye(dims)\n # now transform first to center:\n meanvec = np.mean(coords, 1)\n center = np.eye(dims)\n center[:-1, -1] = -meanvec[:-1]\n 
affine = np.matmul(center, affine)\n\n if np.sum(self.shift):\n affine[:-1, -1] += (self.deformrandomstate.rand(dims - 1) - 0.5) * np.float32(self.shift)\n if np.max(self.scaling) > 1:\n scales = np.ones(dims)\n # scales[:-1] = (self.deformrandomstate.rand(dims-1)-0.5)*(self.scaling-1.0/self.scaling)+(self.scaling+1/self.scaling)/2\n scales[:-1] = self.scaling ** (self.deformrandomstate.rand(dims - 1) * 2 - 1)\n scales = np.diag(scales)\n # print(scales)\n affine = np.matmul(scales, affine)\n if np.sum(self.rotation):\n affine = self._rotate(affine)\n # move back to location:\n center[:-1, -1] = -center[:-1, -1]\n affine = np.matmul(center, affine)\n # now appyl to coords:\n coords = np.matmul(affine, coords)\n coords = coords[:-1]\n coords = coords.reshape(coordsshape)\n return coords", "def transform(fn):\n def _(vec, dt):\n return np.einsum(\n 'ji,i,ki,k...->j...',\n evecs, fn(evals, dt), evecs, vec, optimize=True)\n\n return _", "def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True):\n\n\n\n v0 = np.array(v0, dtype=np.float64, copy=True)\n v1 = np.array(v1, dtype=np.float64, copy=True)\n\n ndims = v0.shape[0]\n if ndims < 2 or v0.shape[1] < ndims or v0.shape != v1.shape:\n print(ndims < 2)\n print(v0.shape[1] < ndims)\n print(v0.shape != v1.shape)\n\n print(ndims)\n\n raise ValueError(\"input arrays are of wrong shape or type\")\n\n # move centroids to origin\n t0 = -np.mean(v0, axis=1)\n M0 = np.identity(ndims+1)\n M0[:ndims, ndims] = t0\n v0 += t0.reshape(ndims, 1)\n t1 = -np.mean(v1, axis=1)\n M1 = np.identity(ndims+1)\n M1[:ndims, ndims] = t1\n v1 += t1.reshape(ndims, 1)\n\n if shear:\n # Affine transformation\n A = np.concatenate((v0, v1), axis=0)\n u, s, vh = np.linalg.svd(A.T)\n vh = vh[:ndims].T\n B = vh[:ndims]\n C = vh[ndims:2*ndims]\n t = np.dot(C, np.linalg.pinv(B))\n t = np.concatenate((t, np.zeros((ndims, 1))), axis=1)\n M = np.vstack((t, ((0.0,)*ndims) + (1.0,)))\n elif usesvd or ndims != 3:\n # Rigid transformation via SVD of covariance matrix\n u, s, vh = np.linalg.svd(np.dot(v1, v0.T))\n # rotation matrix from SVD orthonormal bases\n R = np.dot(u, vh)\n if np.linalg.det(R) < 0.0:\n # R does not constitute right handed system\n R -= np.outer(u[:, ndims-1], vh[ndims-1, :]*2.0)\n s[-1] *= -1.0\n # homogeneous transformation matrix\n M = np.identity(ndims+1)\n M[:ndims, :ndims] = R\n else:\n # Rigid transformation matrix via quaternion\n # compute symmetric matrix N\n xx, yy, zz = np.sum(v0 * v1, axis=1)\n xy, yz, zx = np.sum(v0 * np.roll(v1, -1, axis=0), axis=1)\n xz, yx, zy = np.sum(v0 * np.roll(v1, -2, axis=0), axis=1)\n N = [[xx+yy+zz, 0.0, 0.0, 0.0],\n [yz-zy, xx-yy-zz, 0.0, 0.0],\n [zx-xz, xy+yx, yy-xx-zz, 0.0],\n [xy-yx, zx+xz, yz+zy, zz-xx-yy]]\n # quaternion: eigenvector corresponding to most positive eigenvalue\n w, V = np.linalg.eigh(N)\n q = V[:, np.argmax(w)]\n q /= vector_norm(q) # unit quaternion\n # homogeneous transformation matrix\n M = quaternion_matrix(q)\n\n if scale and not shear:\n # Affine transformation; scale is ratio of RMS deviations from centroid\n v0 *= v0\n v1 *= v1\n M[:ndims, :ndims] *= math.sqrt(np.sum(v1) / np.sum(v0))\n\n # move centroids back\n M = np.dot(np.linalg.inv(M1), np.dot(M, M0))\n M /= M[ndims, ndims]\n return M", "def affine_2Dtransform(img, t_mat, height, width, h_offset=0, w_offset=0, nh_flag=False, nw_flag=False):\n # transform matrix must be validated\n if(np.shape(t_mat) != (2, 2)):\n return img\n\n # implementing matrix multiplication to a default map of source data in order to apply transform\n # 
and to achieve coordination/location of transformed matrix according to source data(data map)\n coord_map = transform_calcualtion(\n height, width, t_mat, h_offset, w_offset, nh_flag, nw_flag)\n\n # transformed image data construction\n t_img = np.full((height+h_offset, width+w_offset, 3), 255, dtype='uint8')\n\n # applying new map to image inorder to complete the transform\n try:\n for i in range(height):\n for j in range(width):\n [i_new_coord, j_new_coord] = coord_map[i, j, :]\n # unhandled bound-jumpout\n t_img[i_new_coord, j_new_coord, :] = img[i, j, :]\n except:\n print(\"not enough offset/negative coordination pushed\")\n return img\n return t_img", "def complex_mul2d(a, b):\n op = partial(torch.einsum, \"bixy,ioxy->boxy\")\n return torch.stack([\n op(a[..., 0], b[..., 0]) - op(a[..., 1], b[..., 1]),\n op(a[..., 1], b[..., 0]) + op(a[..., 0], b[..., 1])\n ],\n dim=-1)", "def compose_transform(T1, T2):\n aux_vec = np.array([0, 0, 1]).reshape(1, 3)\n\n T1 = np.concatenate((T1, aux_vec), axis=0)\n T2 = np.concatenate((T2, aux_vec), axis=0)\n\n T1_inv = np.linalg.inv(T1)\n T = T1_inv@T2\n\n return T[0:2]", "def apply_transformation_np(source, transformation):\n source_homog = np.ones((source.shape[0], 4))\n source_homog[:, :-1] = source\n # source_homog = np.hstack(\n # (source, np.ones(source.shape[0], 1))\n # )\n\n source_transformed = np.matmul(transformation, source_homog.T).T[:, :-1]\n return source_transformed", "def affine_transform_2d(v, mapping, alpha = 1):\r\n p_wgt = vec2(0, 0)\r\n q_wgt = vec2(0, 0)\r\n w = len(mapping)*[None]\r\n w_sum = 0\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n x = mp[0].x - v.x\r\n y = mp[0].y - v.y\r\n if (x == 0 and y == 0): return mp[1]\r\n w[i] = 1/((x*x + y*y) ** alpha)\r\n p_wgt += mp[0]*w[i]\r\n q_wgt += mp[1]*w[i]\r\n w_sum += w[i]\r\n p_wgt /= w_sum\r\n q_wgt /= w_sum\r\n M1 = mat2(0)\r\n M2 = mat2(0)\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n p_adj = mp[0] - p_wgt\r\n q_adj = mp[1] - q_wgt\r\n M1 += p_adj.transpose_multiply(p_adj)*w[i]\r\n M2 += p_adj.transpose_multiply(q_adj)*w[i]\r\n M1 = M1.inverse()\r\n M = M1*M2\r\n M = M.transpose()\r\n v_out = M*(v - p_wgt) + q_wgt\r\n return v_out", "def get_perspective_transform(points_src: Tensor, points_dst: Tensor) -> Tensor:\n KORNIA_CHECK_SHAPE(points_src, [\"B\", \"4\", \"2\"])\n KORNIA_CHECK_SHAPE(points_dst, [\"B\", \"4\", \"2\"])\n KORNIA_CHECK(points_src.shape == points_dst.shape, \"Source data shape must match Destination data shape.\")\n KORNIA_CHECK(points_src.dtype == points_dst.dtype, \"Source data type must match Destination data type.\")\n\n # we build matrix A by using only 4 point correspondence. 
The linear\n # system is solved with the least square method, so here\n # we could even pass more correspondence\n\n # create the lhs tensor with shape # Bx8x8\n B: int = points_src.shape[0] # batch_size\n\n A = torch.empty(B, 8, 8, device=points_src.device, dtype=points_src.dtype)\n\n # we need to perform in batch\n _zeros = zeros(B, device=points_src.device, dtype=points_src.dtype)\n _ones = torch.ones(B, device=points_src.device, dtype=points_src.dtype)\n\n for i in range(4):\n x1, y1 = points_src[..., i, 0], points_src[..., i, 1] # Bx4\n x2, y2 = points_dst[..., i, 0], points_dst[..., i, 1] # Bx4\n\n A[:, 2 * i] = stack([x1, y1, _ones, _zeros, _zeros, _zeros, -x1 * x2, -y1 * x2], -1)\n A[:, 2 * i + 1] = stack([_zeros, _zeros, _zeros, x1, y1, _ones, -x1 * y2, -y1 * y2], -1)\n\n # the rhs tensor\n b = points_dst.view(-1, 8, 1)\n\n # solve the system Ax = b\n X: Tensor = _torch_solve_cast(A, b)\n\n # create variable to return the Bx3x3 transform\n M = torch.empty(B, 9, device=points_src.device, dtype=points_src.dtype)\n M[..., :8] = X[..., 0] # Bx8\n M[..., -1].fill_(1)\n\n return M.view(-1, 3, 3) # Bx3x3", "def get_affine_transform(center, scale, rot, output_size, shift=(0.0, 0.0), inv=False):\n assert len(center) == 2\n assert len(scale) == 2\n assert len(output_size) == 2\n assert len(shift) == 2\n scale_tmp = scale * 200.0\n shift = np.array(shift)\n src_w = scale_tmp[0]\n dst_w = output_size[0]\n dst_h = output_size[1]\n rot_rad = np.pi * rot / 180\n src_dir = rotate_point([0.0, src_w * -0.5], rot_rad)\n dst_dir = np.array([0.0, dst_w * -0.5])\n src = np.zeros((3, 2), dtype=np.float32)\n src[0, :] = center + scale_tmp * shift\n src[1, :] = center + src_dir + scale_tmp * shift\n src[2, :] = _get_3rd_point(src[0, :], src[1, :])\n dst = np.zeros((3, 2), dtype=np.float32)\n dst[0, :] = [dst_w * 0.5, dst_h * 0.5]\n dst[1, :] = np.array([dst_w * 0.5, dst_h * 0.5]) + dst_dir\n dst[2, :] = _get_3rd_point(dst[0, :], dst[1, :])\n if inv:\n trans = cv2.getAffineTransform(np.float32(dst), np.float32(src))\n else:\n trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))\n return trans", "def rigid_transform_2d(v, mapping, alpha = 1):\r\n p_wgt = vec2(0, 0)\r\n q_wgt = vec2(0, 0)\r\n w = len(mapping)*[None]\r\n w_sum = 0\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n x = mp[0].x - v.x\r\n y = mp[0].y - v.y\r\n if (x == 0 and y == 0): return mp[1]\r\n w[i] = 1/((x*x + y*y) ** alpha)\r\n p_wgt += mp[0]*w[i]\r\n q_wgt += mp[1]*w[i]\r\n w_sum += w[i]\r\n p_wgt /= w_sum\r\n q_wgt /= w_sum\r\n A_fac = mat2([v.x - p_wgt.x, v.y - p_wgt.y, v.y - p_wgt.y, p_wgt.x - v.x])\r\n v_out = vec2(0, 0)\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n p_adj = mp[0] - p_wgt\r\n q_adj = mp[1] - q_wgt\r\n A = mat2([p_adj.x, p_adj.y, p_adj.y, -p_adj.x])*A_fac*w[i]\r\n A = A.transpose()\r\n v_out += A*q_adj\r\n r = math.sqrt(v_out.dot(v_out))\r\n v_out /= r\r\n v_sub = v - p_wgt\r\n r = math.sqrt(v_sub.dot(v_sub))\r\n v_out *= r\r\n v_out += q_wgt\r\n return v_out", "def _e_2d_(p, a):\r\n diff = a - p[np.newaxis, :]\r\n return np.einsum('ij,ij->i', diff, diff)", "def affineTransform(x,output_dim):\n w=tf.get_variable(\"w\", [x.get_shape()[1], output_dim])\n b=tf.get_variable(\"b\", [output_dim], initializer=tf.constant_initializer(0.0))\n return tf.matmul(x,w)+b", "def transform_calcualtion(height, width, t_mat, h_offset, w_offset, nh_flag, nw_flag):\n # default coordination/location of transformed matrix according to source data(data map)\n coord_map = default_coord(height, width, h_offset, 
w_offset)\n\n for i in range(height):\n for j in range(width):\n # base calculations\n result = [(t_mat[0][0])*(coord_map[i, j, 0])+int((t_mat[0][1])*(coord_map[i, j, 1])),\n (t_mat[1][0])*(coord_map[i, j, 0])+(t_mat[1][1])*(coord_map[i, j, 1])]\n # since all coordinations must not be negative\n # if happened also apply a translation by offset\n coord_map[i, j, :] = [(result[0], result[0]+h_offset)[nh_flag],\n (result[1], result[1]+w_offset)[nw_flag]]\n return coord_map", "def translate(self, x=0, y=0, z=0):\n\t\ttranslation = np.identity(4)\n\t\ttranslation[0, 3] += x\n\t\ttranslation[1, 3] += y\n\t\ttranslation[2, 3] += z\n\t\t\n\t\tself.matrix = np.matmul(self.matrix, translation)", "def scale_and_translate_points(points):\n x = points[0]\n y = points[1]\n center = points.mean(axis=1) # mean of each row\n cx = x - center[0] # center the points\n cy = y - center[1]\n dist = np.sqrt(np.power(cx, 2) + np.power(cy, 2))\n scale = np.sqrt(2) / dist.mean()\n norm3d = np.array([\n [scale, 0, -scale * center[0]],\n [0, scale, -scale * center[1]],\n [0, 0, 1]\n ])\n\n return np.dot(norm3d, points), norm3d", "def project(A):\n return A.T @ np.linalg.pinv(A @ A.T) @ A", "def apply_affine_transform(x, M):\n is1d = len(x.shape) == 1\n if is1d:\n x = np.expand_dims(x, axis=0)\n\n x_hom = np.concatenate(\n [x, np.ones((x.shape[0], 1), dtype=x.dtype)], axis=-1\n )\n x_out = x_hom @ M.T\n if is1d:\n x_out = np.squeeze(x_out, axis=0)\n return x_out", "def imageTransform(self):\n ims = self.imageShape\n acs = self.activeShape\n dx = self.colVector\n dy = self.rowVector\n\n p0 = self.activeOrigin\n p1 = p0 + acs[2] * dx\n p2 = p0 + acs[1] * dy\n\n # print p0, p1, p2\n # print acs, dx, dy\n\n localPts = list(map(pg.Vector, [[0,0], [ims[2],0], [0,ims[1]], [0,0,1]])) # w and h of data of image in pixels.\n globalPts = list(map(pg.Vector, [p0, p1, p2, [0,0,1]]))\n m = pg.solve3DTransform(localPts, globalPts)\n m[:,2] = m[:,3]\n m[2] = m[3]\n m[2,2] = 1\n tr = Qt.QTransform(*m[:3,:3].transpose().reshape(9))\n return tr", "def affine_mult(affine, coordinates):\n return np.dot(coordinates, affine[:3, :3].T) + affine[:3, -1]", "def magma_sgemv(trans, m, n, alpha, dA, ldda, dx, incx, beta,\n dy, incy, queue):\n\n _libmagma.magma_sgemv(trans, m, n, alpha, int(dA), ldda, dx, incx,\n beta, int(dy), incy, queue)", "def estimate_rigid_transform(points1, points2, translation_only=False):\n centroid1 = points1.mean(axis=0)\n centroid2 = points2.mean(axis=0)\n\n if translation_only:\n rotation = np.eye(2)\n translation = centroid2 - centroid1\n\n else:\n centered_points1 = points1 - centroid1\n centered_points2 = points2 - centroid2\n\n sigma = centered_points2.T @ centered_points1\n U, _, Vt = np.linalg.svd(sigma)\n\n rotation = U @ Vt\n translation = -rotation @ centroid1 + centroid2\n\n H = np.eye(3)\n H[:2,:2] = rotation\n H[:2, 2] = translation\n return H", "def trans_matrix_inv(m:numpy.ndarray):\n was2d = False\n if m.shape[1] == 3:\n was2d = True\n m = numpy.asarray([\n [1.0, 0.0, 0.0, 0.0],\n [0.0, m[0,0], m[0,1], m[0,2]],\n [0.0, m[1,0], m[1,1], m[1,2]],\n [0.0, 0.0, 0.0, 1.0]], numpy.float64)\n trans = m[0:3,3]\n rotate = numpy.zeros(3, numpy.float64)\n r = m[0:3,0:3]\n rc = numpy.linalg.cholesky(numpy.matmul(r.T, r)).T\n scale = numpy.diagonal(rc)\n if numpy.linalg.det(r) < 0.0:\n scale[0] *= -1.0\n rcd = rc * numpy.eye(3, dtype=numpy.float64)\n rc = numpy.linalg.solve(rcd, rc)\n shear = numpy.asarray([rc[0,1], rc[0,2], rc[1,2]], numpy.float64)\n r0 = trans_matrix({'rotate': rotate, 'scale': scale, 'shear': 
shear})[0:3,0:3]\n r0 = numpy.linalg.solve(numpy.linalg.inv(r), numpy.linalg.inv(r0))\n rotate[1] = numpy.arcsin(_frone(r0[0,2]))\n if numpy.abs((numpy.abs(rotate[1]) - (numpy.pi / 2.0))) < 1.0e-6:\n rotate[0] = 0.0\n rotate[2] = numpy.arctan2(-_frone(r0[1,0]), _frone(-r0[2,0] / r0[0,2]))\n else:\n rc = numpy.cos(rotate[1])\n rotate[0] = numpy.arctan2(_frone(r0[1,2] / rc), _frone(r0[2,2] / rc))\n rotate[2] = numpy.arctan2(_frone(r0[0,1] / rc), _frone(r0[0,0] / rc))\n if was2d:\n trans = trans[1:]\n rotate = rotate[0:1]\n scale = scale[1:]\n shear = shear[2:3]\n return (trans, rotate, scale, shear)", "def complex_mul1d(a, b):\n op = partial(torch.einsum, \"bix,iox->box\")\n return torch.stack([\n op(a[..., 0], b[..., 0]) - op(a[..., 1], b[..., 1]),\n op(a[..., 1], b[..., 0]) + op(a[..., 0], b[..., 1])\n ],\n dim=-1)", "def batch_affine_warp2d(imgs, theta):\n n_batch = tf.shape(imgs)[0]\n xlen = tf.shape(imgs)[1]\n ylen = tf.shape(imgs)[2]\n theta = tf.reshape(theta, [-1, 2, 3])\n matrix = tf.slice(theta, [0, 0, 0], [-1, -1, 2])\n t = tf.slice(theta, [0, 0, 2], [-1, -1, -1])\n\n grids = batch_mgrid(n_batch, xlen, ylen)\n coords = tf.reshape(grids, [n_batch, 2, -1])\n\n T_g = tf.batch_matmul(matrix, coords) + t\n T_g = tf.reshape(T_g, [n_batch, 2, xlen, ylen])\n output = batch_warp2d(imgs, T_g)\n return output", "def get_translation_matrix2d(translations: Tensor) -> Tensor:\n transform: Tensor = eye_like(3, translations)[:, :2, :]\n transform[..., 2] += translations # tx/ty\n\n # pad transform to get Bx3x3\n transform_h = convert_affinematrix_to_homography(transform)\n\n return transform_h", "def transformation_2d(vertices, kernels=KERNELS):\n\t# calculate the transpose matrix of vertices\n\ttranspose = vertices.transpose()\n\t# insert a row of ones in the transpose matrix's end, then insert the result in 'matrices' list\n\tkernels.append(np.append(transpose, [np.ones(len(transpose[0]))], axis=0))\n\t# multiply matrices into 'kernels' list,\n\t# remove the last row (of ones) and calculate the transpose matrix of the result\n\tfinal_transformation_result = np.delete(np.linalg.multi_dot(kernels), 2, 0).transpose()\n\tKERNELS.clear()\n\treturn final_transformation_result", "def gen_affine_map(Ab, img_sz, dim=3):\n Ab = Ab.view(Ab.shape[0], dim+1, dim)\n phi = gen_identity_map(img_sz).to(Ab.device)\n phi_cp = phi.view(dim, -1)\n affine_map = torch.matmul(Ab[:, :dim, :], phi_cp)\n affine_map = Ab[:, dim, :].contiguous().view(-1, dim, 1) + affine_map\n affine_map = affine_map.view([Ab.shape[0]] + list(phi.shape))\n return affine_map", "def bs_densmatrix_transform(input_matrix, t, r):\n size = len(input_matrix)\n output_matrix = np.zeros((size*2,) * 4, dtype=complex)\n\n for p1 in range(size):\n for p2 in range(size):\n for p1_ in range(size):\n for p2_ in range(size):\n\n for n in range(p1 + 1):\n for k in range(p2 + 1):\n for n_ in range(p1_ + 1):\n for k_ in range(p2_ + 1):\n d1 = p1 - n + k\n d2 = n + p2 - k\n d1_ = p1_ - n_ + k_\n d2_ = n_ + p2_ - k_\n\n coeff1 = t**(p1 - n + p2 - k) * (1j*r)**(n + k) * sqrt(factorial(d1) * factorial(d2) * factorial(p1) * factorial(p2)) / (factorial(n) * factorial(p1 - n) * factorial(k) * factorial(p2 - k))\n coeff2 = t**(p1_ - n_ + p2_ - k_) * (-1j*r)**(n_ + k_) * sqrt(factorial(d1_) * factorial(d2_) * factorial(p1_) * factorial(p2_)) / (factorial(n_) * factorial(p1_ - n_) * factorial(k_) * factorial(p2_ - k_))\n output_matrix[d1, d2, d1_, d2_] = output_matrix[d1, d2, d1_, d2_] + input_matrix[p1, p2, p1_, p2_] * coeff1 * coeff2\n\n return output_matrix", "def 
affine_trans(self):\n h, w, _ = self.img.shape\n\n \"\"\"\n pts1 = np.float32(\n [\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n ]\n )\n pts2 = np.float32(\n [\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n ]\n )\n \"\"\"\n\n pts1 = np.float32([[50, 50], [200, 50], [50, 200]])\n pts2 = np.float32([[10, 100], [200, 50], [100, 250]])\n\n M = cv2.getAffineTransform(pts1, pts2)\n\n self.img = cv2.warpAffine(self.img, M, (w, h))\n\n self.edits.append(\"affine\")\n return self", "def transformation_matrix(self, s1, s2, s3, t1, t2, t3):\n\n s1 = np.array(s1)\n s2 = np.array(s2)\n s3 = np.array(s3)\n t1 = np.array(t1)\n t2 = np.array(t2)\n t3 = np.array(t3)\n\n Q = np.array(\n [\n [t2[0] - t1[0], t2[1] - t1[1], t2[2] - t1[2]],\n [t3[0] - t1[0], t3[1] - t1[1], t3[2] - t1[2]],\n ]\n )\n\n P = np.array([[s2[0] - s1[0], s2[1] - s1[1]], [s3[0] - s1[0], s3[1] - s1[1]]])\n\n try:\n # Invert the P matrix\n Pinv = inv(P)\n\n # Build the dot product\n T = np.dot(Pinv, Q)\n\n # Offset\n V0 = np.subtract(t2, np.transpose(s2[0:2]).dot(T))\n except Exception as e:\n self.log.error(\"An error occured during the transformation.\", exc_info=True)\n return -1, -1\n\n return T, V0", "def _proj(u,v):\n return (np.einsum('i...,i...->...',u,v)/np.einsum('i...,i...->...',u,u))*u", "def _transform(\n self, x: \"torch.Tensor\", y: Optional[\"torch.Tensor\"], **kwargs\n ) -> Tuple[\"torch.Tensor\", Optional[\"torch.Tensor\"]]:\n import torch\n import torchvision.transforms.functional as F\n\n img_size = x.shape[:2]\n\n angle = float(\n torch.empty(1)\n .uniform_(float(self.degree_range[0]), float(self.degree_range[1]))\n .item()\n )\n\n max_dx = float(self.translate[0] * img_size[1])\n max_dy = float(self.translate[1] * img_size[0])\n tx = int(round(torch.empty(1).uniform_(-max_dx, max_dx).item()))\n ty = int(round(torch.empty(1).uniform_(-max_dy, max_dy).item()))\n translations = (tx, ty)\n\n scale = float(torch.empty(1).uniform_(self.scale[0], self.scale[1]).item())\n\n # x needs to have channel first\n x = x.permute(2, 0, 1)\n x = F.affine(\n img=x, angle=angle, translate=translations, scale=scale, shear=(0.0, 0.0)\n )\n x = x.permute(1, 2, 0)\n\n return torch.clamp(x, min=self.clip_values[0], max=self.clip_values[1]), y", "def get_projective_transform(center: Tensor, angles: Tensor, scales: Tensor) -> Tensor:\n if not (len(center.shape) == 2 and center.shape[-1] == 3):\n raise AssertionError(center.shape)\n if not (len(angles.shape) == 2 and angles.shape[-1] == 3):\n raise AssertionError(angles.shape)\n if center.device != angles.device:\n raise AssertionError(center.device, angles.device)\n if center.dtype != angles.dtype:\n raise AssertionError(center.dtype, angles.dtype)\n\n # create rotation matrix\n axis_angle_rad: Tensor = deg2rad(angles)\n rmat: Tensor = axis_angle_to_rotation_matrix(axis_angle_rad) # Bx3x3\n scaling_matrix: Tensor = eye_like(3, rmat)\n scaling_matrix = scaling_matrix * scales.unsqueeze(dim=1)\n rmat = rmat @ scaling_matrix.to(rmat)\n\n # define matrix to move forth and back to origin\n from_origin_mat = eye_like(4, rmat, shared_memory=False) # Bx4x4\n from_origin_mat[..., :3, -1] += center\n\n to_origin_mat = from_origin_mat.clone()\n to_origin_mat = _torch_inverse_cast(from_origin_mat)\n\n # append translation with zeros\n proj_mat = projection_from_Rt(rmat, torch.zeros_like(center)[..., None]) # Bx3x4\n\n # chain 4x4 transforms\n proj_mat = 
convert_affinematrix_to_homography3d(proj_mat) # Bx4x4\n proj_mat = from_origin_mat @ proj_mat @ to_origin_mat\n\n return proj_mat[..., :3, :] # Bx3x4", "def test_transform_2d(transform, alpha = 1):\r\n points = 20*[None]\r\n for i in range(20):\r\n x = random.randrange(-40, 41)\r\n y = random.randrange(-40, 41)\r\n points[i] = vec2(x, y)\r\n tr_x = random.randrange(-40, 41)\r\n tr_y = random.randrange(-40, 41)\r\n mapping = [(p, vec2(p.x + tr_x, p.y + tr_y)) for p in points]\r\n print(\"Translation\")\r\n print(\"Input\".ljust(20), \"Translation\".ljust(20), \"Transformation\".ljust(20))\r\n for i in range(20):\r\n x = random.randrange(-40, 41)\r\n y = random.randrange(-40, 41)\r\n v_in = vec2(x, y)\r\n v_translate = vec2(x + tr_x, y + tr_y)\r\n v_transform = transform(v_in, mapping, alpha)\r\n print(str(v_in).ljust(20), str(v_translate.str_repr(4)).ljust(20), str(v_transform.str_repr(4)).ljust(20))\r\n print()\r\n th = 2*math.pi*random.random()\r\n mapping = [(p, vec2(p.x*math.cos(th) - p.y*math.sin(th), p.x*math.sin(th) + p.y*math.cos(th))) for p in points]\r\n print(\"Rotation\")\r\n print(\"Input\".ljust(20), \"Rotation\".ljust(20), \"Transformation\".ljust(20))\r\n for i in range(20):\r\n x = random.randrange(-40, 41)\r\n y = random.randrange(-40, 41)\r\n v_in = vec2(x, y)\r\n v_rotate = vec2(x*math.cos(th) - y*math.sin(th), x*math.sin(th) + y*math.cos(th))\r\n v_transform = transform(v_in, mapping, alpha)\r\n print(str(v_in).ljust(20), str(v_rotate.str_repr(4)).ljust(20), str(v_transform.str_repr(4)).ljust(20))\r\n print()\r\n k = math.exp(2*random.random() - 1)\r\n mapping = [(p, vec2(k*p.x, k*p.y)) for p in points]\r\n print(\"Uniform scaling\")\r\n print(\"Input\".ljust(20), \"Scaling\".ljust(20), \"Transformation\".ljust(20))\r\n for i in range(20):\r\n x = random.randrange(-40, 41)\r\n y = random.randrange(-40, 41)\r\n v_in = vec2(x, y)\r\n v_scale = vec2(k*x, k*y)\r\n v_transform = transform(v_in, mapping, alpha)\r\n print(str(v_in).ljust(20), str(v_scale.str_repr(4)).ljust(20), str(v_transform.str_repr(4)).ljust(20))\r\n print()\r\n k_x = math.exp(2*random.random() - 1)\r\n k_y = 3*random.random() + 1\r\n if (k_x >= k_y + math.exp(-1)): k_y = k_x - k_y\r\n else: k_y = k_x + k_y\r\n mapping = [(p, vec2(k_x*p.x, k_y*p.y)) for p in points]\r\n print(\"Non-uniform scaling\")\r\n print(\"Input\".ljust(20), \"Scaling\".ljust(20), \"Transformation\".ljust(20))\r\n for i in range(20):\r\n x = random.randrange(-40, 41)\r\n y = random.randrange(-40, 41)\r\n v_in = vec2(x, y)\r\n v_scale = vec2(k_x*x, k_y*y)\r\n v_transform = transform(v_in, mapping, alpha)\r\n print(str(v_in).ljust(20), str(v_scale.str_repr(4)).ljust(20), str(v_transform.str_repr(4)).ljust(20))\r\n print()", "def similarity_transformation(rot, mat):\n return np.dot(rot, np.dot(mat, np.linalg.inv(rot)))", "def similarity_transformation(rot, mat):\n return np.dot(rot, np.dot(mat, np.linalg.inv(rot)))", "def estimate_rigid_transform(points1, points2, translation_only=False):\n centroid1 = points1.mean(axis=0)\n centroid2 = points2.mean(axis=0)\n\n if translation_only:\n rotation = np.eye(2)\n translation = centroid2 - centroid1\n\n else:\n centered_points1 = points1 - centroid1\n centered_points2 = points2 - centroid2\n\n sigma = centered_points2.T @ centered_points1\n U, _, Vt = np.linalg.svd(sigma)\n\n rotation = U @ Vt\n translation = -rotation @ centroid1 + centroid2\n\n H = np.eye(3)\n H[:2, :2] = rotation\n H[:2, 2] = translation\n return H", "def estimate_rigid_transform(points1, points2, translation_only=False):\n 
centroid1 = points1.mean(axis=0)\n centroid2 = points2.mean(axis=0)\n\n if translation_only:\n rotation = np.eye(2)\n translation = centroid2 - centroid1\n\n else:\n centered_points1 = points1 - centroid1\n centered_points2 = points2 - centroid2\n\n sigma = centered_points2.T @ centered_points1\n U, _, Vt = np.linalg.svd(sigma)\n\n rotation = U @ Vt\n translation = -rotation @ centroid1 + centroid2\n\n H = np.eye(3)\n H[:2, :2] = rotation\n H[:2, 2] = translation\n return H", "def similarity_transform_2d(v, mapping, alpha = 1):\r\n p_wgt = vec2(0, 0)\r\n q_wgt = vec2(0, 0)\r\n w = len(mapping)*[None]\r\n w_sum = 0\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n x = mp[0].x - v.x\r\n y = mp[0].y - v.y\r\n if (x == 0 and y == 0): return mp[1]\r\n w[i] = 1/((x*x + y*y) ** alpha)\r\n p_wgt += mp[0]*w[i]\r\n q_wgt += mp[1]*w[i]\r\n w_sum += w[i]\r\n p_wgt /= w_sum\r\n q_wgt /= w_sum\r\n mu = 0\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n p_adj = mp[0] - p_wgt\r\n mu += w[i]*(p_adj.dot(p_adj))\r\n A_fac = mat2([v.x - p_wgt.x, v.y - p_wgt.y, v.y - p_wgt.y, p_wgt.x - v.x])\r\n v_out = vec2(0, 0)\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n p_adj = mp[0] - p_wgt\r\n q_adj = mp[1] - q_wgt\r\n A = mat2([p_adj.x, p_adj.y, p_adj.y, -p_adj.x])*A_fac*w[i]\r\n A = A.transpose()\r\n v_out += A*q_adj/mu\r\n v_out += q_wgt\r\n return v_out", "def apply_transformation(self, points):\n assert (points.shape[0] == 3)\n n = points.shape[1]\n points_ = np.vstack((points, np.ones((1, n))))\n points_trans_ = np.matmul(self.pose_mat, points_)\n points_transformed = np.true_divide(points_trans_[:3, :], points_trans_[[-1], :])\n return points_transformed", "def transforms_multiply(t0s, t1s):\r\n \r\n return ut.matrix_multiply(t0s, t1s)", "def _dot_product_attention_inner_relative(x, y, z, transpose):\n batch_size, heads, length, _ = x.size()\n\n # xy_matmul is [batch_size, heads, length, length or depth]\n xy_matmul = torch.matmul(x, y if not transpose else y.transpose(-2, -1))\n # x_t is [length, batch_size, heads, length or depth]\n x_t = x.permute(2, 0, 1, 3)\n # x_t_r is [length, batch_size * heads, length or depth]\n x_t_r = x_t.view(length, batch_size * heads, -1)\n # x_tz_matmul is [length, batch_size * heads, length or depth]\n x_tz_matmul = torch.matmul(x_t_r, z if not transpose else z.transpose(-2, -1))\n # x_tz_matmul_r is [length, batch_size, heads, length or depth]\n x_tz_matmul_r = x_tz_matmul.view(length, batch_size, heads, -1)\n # x_tz_matmul_r_t is [batch_size, heads, length, length or depth]\n x_tz_matmul_r_t = x_tz_matmul_r.permute(1, 2, 0, 3)\n\n return xy_matmul + x_tz_matmul_r_t", "def getAffineTransform(self, coord1, coord2):\n # generate coord1 into A\n mat_A = np.zeros((2*coord1.shape[0], 6))\n coord1 = np.hstack([coord1, np.ones((coord1.shape[0], 1))])\n for i in range(coord1.shape[0]):\n row = coord1[i,:]\n row_block = block_diag(row, row)\n assert(row_block.shape == (2,6))\n mat_A[2*i:2*i+2, :] = row_block\n \n # generate coord2 into b\n vec_b = coord2.reshape(-1,1)\n\n # solve the least square\n pseudo_inv = np.linalg.inv(np.matmul(mat_A.T, mat_A))\n pseudo_inv = np.matmul(pseudo_inv, mat_A.T)\n affine_mat = np.matmul(pseudo_inv, vec_b)\n assert(affine_mat.shape == (6,1))\n \n return affine_mat.reshape(2,-1)", "def affine_to_shift(affine_matrix, volshape, shift_center=True, indexing='ij'):\n\n if isinstance(volshape, (tf.compat.v1.Dimension, tf.TensorShape)):\n volshape = volshape.as_list()\n \n if affine_matrix.dtype != 'float32':\n affine_matrix = 
tf.cast(affine_matrix, 'float32')\n\n nb_dims = len(volshape)\n\n if len(affine_matrix.shape) == 1:\n if len(affine_matrix) != (nb_dims * (nb_dims + 1)):\n raise ValueError('transform is supposed a vector of len ndims * (ndims + 1).'\n 'Got len %d' % len(affine_matrix))\n\n affine_matrix = tf.reshape(affine_matrix, [nb_dims, nb_dims + 1])\n\n if not (affine_matrix.shape[0] in [nb_dims, nb_dims + 1] and affine_matrix.shape[1] == (nb_dims + 1)):\n shape1 = '(%d x %d)' % (nb_dims + 1, nb_dims + 1)\n shape2 = '(%d x %s)' % (nb_dims, nb_dims + 1)\n true_shape = str(affine_matrix.shape)\n raise Exception('Affine shape should match %s or %s, but got: %s' % (shape1, shape2, true_shape))\n\n # list of volume ndgrid\n # N-long list, each entry of shape volshape\n mesh = ne.utils.volshape_to_meshgrid(volshape, indexing=indexing) \n mesh = [tf.cast(f, 'float32') for f in mesh]\n \n if shift_center:\n mesh = [mesh[f] - (volshape[f]-1)/2 for f in range(len(volshape))]\n\n # add an all-ones entry and transform into a large matrix\n flat_mesh = [ne.utils.flatten(f) for f in mesh]\n flat_mesh.append(tf.ones(flat_mesh[0].shape, dtype='float32'))\n mesh_matrix = tf.transpose(tf.stack(flat_mesh, axis=1)) # 4 x nb_voxels\n\n # compute locations\n loc_matrix = tf.matmul(affine_matrix, mesh_matrix) # N+1 x nb_voxels\n loc_matrix = tf.transpose(loc_matrix[:nb_dims, :]) # nb_voxels x N\n loc = tf.reshape(loc_matrix, list(volshape) + [nb_dims]) # *volshape x N\n # loc = [loc[..., f] for f in range(nb_dims)] # N-long list, each entry of shape volshape\n\n # get shifts and return\n return loc - tf.stack(mesh, axis=nb_dims)", "def affine_transform(x, output_dim, name=None):\n\n w = tf.get_variable(name + \"_w\", [x.get_shape()[1], output_dim], initializer=tf.truncated_normal_initializer(stddev=0.02))\n b = tf.get_variable(name + \"_b\", [output_dim], initializer=tf.constant_initializer(0.0))\n\n return tf.matmul(x, w) + b", "def img_map_transforms(ts):\n # XXX TODO: unchecked textures give error of variable referenced before assignment XXX\n # POV-Ray \"scale\" is not a number of repetitions factor, but ,its\n # inverse, a standard scale factor.\n # 0.5 Offset is needed relatively to scale because center of the\n # scale is 0.5,0.5 in blender and 0,0 in POV\n # Strange that the translation factor for scale is not the same as for\n # translate.\n # TODO: verify both matches with other blender renderers / internal in previous versions.\n image_map_transforms = \"\"\n image_map_transforms = \"scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>\" % (\n ts.scale[0],\n ts.scale[1],\n ts.scale[2],\n ts.offset[0],\n ts.offset[1],\n ts.offset[2],\n )\n # image_map_transforms = (\" translate <-0.5,-0.5,0.0> scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>\" % \\\n # ( 1.0 / ts.scale.x,\n # 1.0 / ts.scale.y,\n # 1.0 / ts.scale.z,\n # (0.5 / ts.scale.x) + ts.offset.x,\n # (0.5 / ts.scale.y) + ts.offset.y,\n # ts.offset.z))\n # image_map_transforms = (\n # \"translate <-0.5,-0.5,0> \"\n # \"scale <-1,-1,1> * <%.4g,%.4g,%.4g> \"\n # \"translate <0.5,0.5,0> + <%.4g,%.4g,%.4g>\" % \\\n # (1.0 / ts.scale.x,\n # 1.0 / ts.scale.y,\n # 1.0 / ts.scale.z,\n # ts.offset.x,\n # ts.offset.y,\n # ts.offset.z)\n # )\n return image_map_transforms", "def make_translations(dataset, labels):\n offset = 10\n translations = [\n (0, offset),\n (0, -offset),\n (offset, 0),\n (-offset, 0),\n (-offset, -offset),\n (-offset, offset),\n (offset, -offset),\n (offset, offset)\n ]\n\n was_flattened = (len(dataset[0].shape) == 1)\n augmented_dataset = []\n 
augmented_labels = []\n \n for image, label in zip(dataset, labels):\n if was_flattened:\n image = unflatten(image)\n \n height = image.shape[0]\n width = image.shape[1]\n \n for t_x, t_y in translations:\n new_image = np.zeros(image.shape)\n t_mat = np.array([[1,0,t_x],[0,1,t_y],[0,0,1]])\n\n for x in range(0, width):\n for y in range(0, height):\n old_coords = np.array([[x],[y],[1]])\n new_coords = t_mat.dot(old_coords) # translation here\n\n if new_coords[0] > 0 and new_coords[0] < width and new_coords[1] > 0 and new_coords[1] < height:\n new_image[new_coords[1], new_coords[0]] = image[y, x]\n \n if was_flattened:\n new_image.flatten()\n augmented_dataset.append(new_image)\n augmented_labels.append(label)\n\n return (augmented_dataset, augmented_labels)", "def matmul(x, y):\n return np.matmul(x, y)", "def TransformPoint(transform, x, y, z):\n result = np.matmul(transform, np.array([x, y, z, 1.]))\n return result[0], result[1], result[2]", "def get_perspective_transform(src, dst):\n if not isinstance(src, torch.Tensor):\n raise TypeError(\"Input type is not a torch.Tensor. Got {}\".format(type(src)))\n\n if not isinstance(dst, torch.Tensor):\n raise TypeError(\"Input type is not a torch.Tensor. Got {}\".format(type(dst)))\n\n if not src.shape[-2:] == (4, 2):\n raise ValueError(\"Inputs must be a Bx4x2 tensor. Got {}\".format(src.shape))\n\n if not src.shape == dst.shape:\n raise ValueError(\"Inputs must have the same shape. Got {}\".format(dst.shape))\n\n if not (src.shape[0] == dst.shape[0]):\n raise ValueError(\n \"Inputs must have same batch size dimension. Expect {} but got {}\".format(src.shape, dst.shape)\n )\n\n # we build matrix A by using only 4 point correspondence. The linear\n # system is solved with the least square method, so here\n # we could even pass more correspondence\n p = []\n for i in [0, 1, 2, 3]:\n p.append(_build_perspective_param(src[:, i], dst[:, i], 'x'))\n p.append(_build_perspective_param(src[:, i], dst[:, i], 'y'))\n\n # A is Bx8x8\n A = torch.stack(p, dim=1)\n\n # b is a Bx8x1\n b = torch.stack(\n [\n dst[:, 0:1, 0],\n dst[:, 0:1, 1],\n dst[:, 1:2, 0],\n dst[:, 1:2, 1],\n dst[:, 2:3, 0],\n dst[:, 2:3, 1],\n dst[:, 3:4, 0],\n dst[:, 3:4, 1],\n ],\n dim=1,\n )\n\n # solve the system Ax = b\n X, LU = _torch_solve_cast(b, A)\n\n # create variable to return\n batch_size = src.shape[0]\n M = torch.ones(batch_size, 9, device=src.device, dtype=src.dtype)\n M[..., :8] = torch.squeeze(X, dim=-1)\n\n return M.view(-1, 3, 3) # Bx3x3", "def convert_matmul(g, op, block):\n\n inputs = [g.get_node(op.input(\"X\")[0]), g.get_node(op.input(\"Y\")[0])]\n a_shape = infer_shape(inputs[0])\n b_shape = infer_shape(inputs[1])\n if op.has_attr(\"trans_x\"):\n # for matmul_v2\n trans_x = op.attr(\"trans_x\")\n trans_y = op.attr(\"trans_y\")\n else:\n # for matmul\n trans_x = op.attr(\"transpose_X\")\n trans_y = op.attr(\"transpose_Y\")\n if trans_x:\n perm = list(range(len(a_shape)))\n perm[-2] = len(a_shape) - 1\n perm[-1] = len(a_shape) - 2\n inputs[0] = _op.transpose(inputs[0], axes=perm)\n if trans_y:\n perm = list(range(len(b_shape)))\n perm[-2] = len(b_shape) - 1\n perm[-1] = len(b_shape) - 2\n inputs[1] = _op.transpose(inputs[1], axes=perm)\n\n # This implemention almost keeps same with ONNX\n # Need to check input shape as batch matmul must be supported.\n a_shape = shape_of(inputs[0], dtype=\"int32\")\n a_rank = infer_shape(a_shape)[0]\n b_shape = shape_of(inputs[1], dtype=\"int32\")\n b_rank = infer_shape(b_shape)[0]\n # When performing a batch matmul, we need to properly 
handle N-dim shapes.\n if a_rank > 2 or b_rank > 2:\n\n def flatten_to_nd(x, x_shape, nd=3):\n ndims = infer_shape(x_shape)[0]\n if ndims == nd:\n return x\n newshape = _op.concatenate(\n [\n _expr.const([-1], dtype=infer_type(x_shape).checked_type.dtype),\n _op.strided_slice(x_shape, [ndims - nd + 1], [ndims]),\n ],\n 0,\n )\n out = _op.reshape(x, fold_constant(newshape))\n return out\n\n b_type = infer_type(inputs[1])\n # Convert to dense if the second matrix is 2d and non-dynamic\n if b_rank == 2 and not _ty.is_dynamic(b_type.checked_type):\n a = flatten_to_nd(inputs[0], a_shape, 2)\n b = _op.transpose(inputs[1])\n output = _op.nn.dense(a, b)\n else:\n # Convert a and b into 3 dimensional tensors.\n a = flatten_to_nd(inputs[0], a_shape, 3)\n b = flatten_to_nd(inputs[1], b_shape, 3)\n # Transpose matrix dimensions of b.\n b = _op.transpose(b, [0, 2, 1])\n # Perform a batch matmul.\n output = _op.nn.batch_matmul(a, b)\n # Determine the output batch dimension.\n if a_rank > b_rank:\n out_batch = _op.strided_slice(a_shape, [0], [a_rank - 2])\n elif a_rank < b_rank:\n out_batch = _op.strided_slice(b_shape, [0], [b_rank - 2])\n # If its unclear how broadcasting should be applied, the output\n # shape is determined by choosing the maximum value from each input.\n else:\n out_batch = _op.concatenate(\n [\n _op.maximum(\n _op.strided_slice(a_shape, [i], [i + 1]),\n _op.strided_slice(b_shape, [i], [i + 1]),\n )\n for i in range(a_rank - 2)\n ],\n 0,\n )\n # Reshape output to original dimensions.\n final_shape = _op.concatenate(\n [\n out_batch,\n _op.strided_slice(\n a_shape, [infer_shape(a_shape)[0] - 2], [infer_shape(a_shape)[0] - 1]\n ),\n _op.strided_slice(\n b_shape, [infer_shape(b_shape)[0] - 1], [infer_shape(b_shape)[0]]\n ),\n ],\n 0,\n )\n out = _op.reshape(output, fold_constant(final_shape))\n else:\n if b_rank == 1:\n inputs[1] = _op.expand_dims(inputs[1], 1, 1)\n # Otherwise a simple dense op will get the job done.\n input_1_t = _op.transpose(inputs[1], axes=(1, 0))\n out = _op.nn.dense(inputs[0], input_1_t)\n if b_rank == 1:\n out = _op.squeeze(out, axis=[-1])\n if op.has_attr(\"alpha\"):\n alpha = op.attr(\"alpha\")\n if not np.isclose(alpha, 1.0):\n out = out * _expr.const(alpha).astype(\"float32\")\n g.add_node(op.output(\"Out\")[0], out)", "def small_transf(x, y, a, xc, yc, ac, iref):\n x += xc - x[iref]\n y += yc - y[iref]\n x, y = rotate_xy(x, y, x[iref], y[iref], ac - a[iref])\n a += ac - a[iref]\n return x, y, a", "def affine(params, x):\n return np.dot(params['w'], x) + params['b']", "def apply_transform_matrix(self, img: np.ndarray, transform_matrix):\n h, w = img.shape[0], img.shape[1]\n transform_matrix = transform_matrix_offset_center(transform_matrix, h, w)\n img = np.rollaxis(img, 2, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n\n channel_images = [scipy.ndimage.interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=1,\n mode=self.fill_mode,\n cval=self.cval) for x_channel in img]\n img = np.stack(channel_images, axis=0)\n img = np.rollaxis(img, 0, 2 + 1)\n # img = apply_affine_transform(img, transform_matrix, channel_axis=2, fill_mode=self.fill_mode, cval=self.cval) # apply_transform\n return img", "def rescale(A, d1, d2):\n \n A[0, 1] = A[0, 1] * (d2 / d1)\n A[1, 0] = A[1, 0] * (d1 / d2)\n \n return A", "def einsum(ops, *args):\n\n if len(args) != 2:\n raise ValueError(\"Currently only two operands are supported\")\n\n inops, outops = ops.split('->')\n inops = inops.split(',')\n\n # 
All indices that are in input AND in output are multiplies\n multiplies = sorted(list(set(inops[0]) & set(inops[1]) & set(outops)))\n # All indices that are in input BUT NOT in output are sum contractions\n sums = sorted(list((set(inops[0]) & set(inops[1])) - set(outops)))\n\n # Map sums and indices to axis integers\n multiplies = [[inop.find(x) for x in multiplies] for inop in inops]\n sums = [[inop.find(x) for x in sums] for inop in inops]\n\n # Find output axes in input axes for final transpose\n # Values very likely lie outside of output tensor shape, so\n # just map them values to their rank (index in ordered list)\n transpose = [''.join(inops).find(x) for x in outops]\n transpose = scipy.stats.rankdata(transpose).astype(int) - 1\n\n return tensordot2(*args, sum=sums, multiply=multiplies).transpose(transpose)", "def compose(scales, offsets, rotations, origin=None):\n\n preRotate = np.eye(4)\n postRotate = np.eye(4)\n\n rotations = np.array(rotations)\n\n if rotations.shape == (3,):\n rotations = axisAnglesToRotMat(*rotations)\n\n if origin is not None:\n preRotate[ 0, 3] = -origin[0]\n preRotate[ 1, 3] = -origin[1]\n preRotate[ 2, 3] = -origin[2]\n postRotate[0, 3] = origin[0]\n postRotate[1, 3] = origin[1]\n postRotate[2, 3] = origin[2]\n\n scale = np.eye(4, dtype=np.float64)\n offset = np.eye(4, dtype=np.float64)\n rotate = np.eye(4, dtype=np.float64)\n\n scale[ 0, 0] = scales[ 0]\n scale[ 1, 1] = scales[ 1]\n scale[ 2, 2] = scales[ 2]\n offset[ 0, 3] = offsets[0]\n offset[ 1, 3] = offsets[1]\n offset[ 2, 3] = offsets[2]\n\n rotate[:3, :3] = rotations\n\n return concat(offset, postRotate, rotate, preRotate, scale)", "def augment_translate(image, mask, trans_range):\n\n tr_x = trans_range * np.random.uniform() - trans_range / 2\n tr_y = trans_range * np.random.uniform() - trans_range / 2\n trans_M = np.array([[1, 0, tr_x], [0, 1, tr_y]], dtype=np.float32)\n width, height, _ = image.shape\n image = cv2.warpAffine(image, trans_M, (width, height))\n if mask is not None:\n mask = cv2.warpAffine(mask, trans_M, (width, height))\n return image, mask", "def estimate_translation_np(S, joints_2d, joints_conf, focal_length=5000, img_size=224):\n\n num_joints = S.shape[0]\n # focal length\n f = np.array([focal_length, focal_length])\n # optical center\n center = np.array([img_size / 2., img_size / 2.])\n\n # transformations\n Z = np.reshape(np.tile(S[:, 2], (2, 1)).T, -1)\n XY = np.reshape(S[:, 0:2], -1)\n O = np.tile(center, num_joints)\n F = np.tile(f, num_joints)\n weight2 = np.reshape(np.tile(np.sqrt(joints_conf), (2, 1)).T, -1)\n\n # least squares\n Q = np.array([F * np.tile(np.array([1, 0]), num_joints), F * np.tile(np.array([0, 1]), num_joints),\n O - np.reshape(joints_2d, -1)]).T\n c = (np.reshape(joints_2d, -1) - O) * Z - F * XY\n\n # weighted least squares\n W = np.diagflat(weight2)\n Q = np.dot(W, Q)\n c = np.dot(W, c)\n\n # square matrix\n A = np.dot(Q.T, Q)\n b = np.dot(Q.T, c)\n\n # solution\n trans = np.linalg.solve(A, b)\n\n return trans", "def scale_rotate_translate_coords(\n coords: np.array,\n m: np.array\n):\n return np.dot(\n m, np.vstack((coords.transpose(), np.ones(len(coords))))\n ).transpose()[:, :2]", "def translate(img, shift):\n\tgray = grayscale(img)\n\ttmp = img.copy()\n\trows, cols = gray.shape\n\tM = np.float32([[1, 0, shift[0]], [0, 1, shift[1]]]) # Translation Matrix\n\tdst = cv2.warpAffine(tmp, M, (cols, rows))\n\treturn dst", "def fit(self,data):\n A = data[:,0:self.dim]\n B = data[:,self.dim:]\n\n #Procrustean: CODE TO SOLVE FOR OPTIMAL (by least squares) 
EUCLIDEAN TRANSFORM (currently allows reflections):\n bReflection = True\n\n muA = np.mean(A,axis=0)\n muB = np.mean(B,axis=0)\n A0 = A - np.tile(muA,[A.shape[0],1])\n B0 = B - np.tile(muB,[B.shape[0],1])\n\n ssqA = np.sum(A0**2)\n ssqB = np.sum(B0**2)\n normA = np.sqrt(ssqA)\n normB = np.sqrt(ssqB)\n\n A0 = A0 / normA\n B0 = B0 / normB\n\n X = np.dot(A0.T,B0)\n [L, D, M] = np.linalg.svd(X)\n rotation = np.dot(M.T,L.T)\n\n traceTA = sum(D)\n if self.bScale:\n scale = traceTA * normA / normB\n else:\n scale = 1\n translation = muA - np.dot(scale*muB,rotation); \n\n self.scale = scale\n self.rot = rotation\n self.trans = translation\n return scale,rotation,translation\n #A - (np.dot((scale * B),rotation) + translation)", "def transform_space(normal_map, rotmat):\n rotmat = np.array(rotmat)\n orig_shape = normal_map.shape\n normal = normal_map.reshape(-1, 3).T # 3-by-N\n\n normal_trans = rotmat.dot(normal)\n\n normal_map_trans = normal_trans.T.reshape(orig_shape)\n return normal_map_trans", "def translate(geom, xoff=0.0, yoff=0.0, zoff=0.0):\n matrix = (1.0, 0.0, 0.0,\n 0.0, 1.0, 0.0,\n 0.0, 0.0, 1.0,\n xoff, yoff, zoff)\n return affine_transform(geom, matrix)", "def L1Uv2(A, d):\n n = shape(A)[0]\n for k in range(1,n):\n km = array([0, k - d]).max() # First index of r we need to update\n for r in range(km, k - 1):\n A[k, r] /= A[r, r]\n uk = array([k, r + d + 1]).min() # last index not included\n A[k, (r + 1):uk] -= A[r, (r + 1):uk]*A[k, r]\n A[k, k - 1] /= A[k - 1,k - 1] \n for r in range(km, k):\n uk = array([k + 1, r + d + 1]).min() # last index not included\n A[(r + 1):uk, k] -= A[(r + 1):uk, r]*A[r, k]", "def mca_transformer(transform_data):\n M, dims, index, v0v1 = transform_data\n def transform(dfp):\n # dims, index, v0v1\n P = np.zeros((len(dfp), dims), dtype=float)\n print(\"transforming\")\n for i, (_, row) in (enumerate(dfp.iterrows())):\n ivec = np.zeros(M)\n for col, val in zip(row.index, row):\n if (col, val) in index:\n ivec[index[col, val]] = 1\n proj = ivec.dot(v0v1)\n assert(all(proj.imag == 0))\n P[i,:] = proj.real\n return P\n return transform", "def transform_image(image, transform, mapping, alpha = 1, incr_x = 10, incr_y = 10):\r\n background = [255, 255, 255, 0]\r\n width, height = image.size\r\n image_in = np.array(image.convert(\"RGBA\"))\r\n image_out = [[background[:] for j in range(width)] for i in range(height)]\r\n transform_row = []\r\n for i in range(0, width + incr_x, incr_x):\r\n transform_row.append(transform(vec2(i, 0), mapping, alpha))\r\n for i in range(incr_y, height + incr_y, incr_y):\r\n p_ur = transform_row[0]\r\n p_lr = transform_row[0] = transform(vec2(0, i), mapping, alpha)\r\n for j in range(incr_x, width + incr_x, incr_x):\r\n p_ul = p_ur\r\n p_ll = p_lr\r\n p_ur = transform_row[j//incr_x]\r\n p_lr = transform_row[j//incr_x] = transform(vec2(j, i), mapping, alpha)\r\n a = p_ur - p_ul\r\n b = p_ll - p_ul\r\n det = a.x*b.y - a.y*b.x\r\n if (det != 0.0):\r\n for p in triangle(p_ul, p_ur, p_ll, width, height):\r\n c = p - p_ul\r\n rx = (b.y*c.x - b.x*c.y)/det\r\n ry = (a.x*c.y - a.y*c.x)/det\r\n image_out[p.y][p.x] = image_in[min(height - 1, max(0, round(i + (ry - 1)*incr_y)))][min(width - 1, max(0, round(j + (rx - 1)*incr_x)))]\r\n a = p_lr - p_ll\r\n b = p_lr - p_ur\r\n det = a.x*b.y - a.y*b.x\r\n if (det != 0.0):\r\n p_ulr = p_ur + p_ll - p_lr\r\n for p in triangle(p_ur, p_ll, p_lr, width, height):\r\n c = p - p_ulr\r\n rx = (b.y*c.x - b.x*c.y)/det\r\n ry = (a.x*c.y - a.y*c.x)/det\r\n image_out[p.y][p.x] = image_in[min(height - 1, max(0, round(i 
+ (ry - 1)*incr_y)))][min(width - 1, max(0, round(j + (rx - 1)*incr_x)))]\r\n image_out = Image.fromarray(np.uint8(image_out))\r\n return image_out", "def compose_transforms(*transforms):\n from functools import reduce\n\n for transform in transforms:\n vg.shape.check(locals(), \"transform\", (4, 4))\n\n if len(transforms) == 0:\n return np.eye(4)\n\n return reduce(np.dot, reversed(transforms))", "def transform(pt, center, scale, res, invert=0, rot=0):\n t = get_transform_pose(center, scale, res, rot=rot)\n if invert:\n t = np.linalg.inv(t)\n new_pt = np.pad(pt,((0,0),(0,1)),mode = 'constant', constant_values = 1).T\n new_pt = np.dot(t, new_pt)\n return new_pt", "def apply(self,v):\n return np.tensordot(self._transform, v, axes=([1],[0])) \\\n + self._translation", "def TransformVector(self, *args):\n return _itkTranslationTransformPython.itkTranslationTransformD2_TransformVector(self, *args)", "def _inv22_vectorized(M):\n assert (M.ndim == 3)\n assert (M.shape[-2:] == (2, 2))\n M_inv = np.empty_like(M)\n delta_inv = np.reciprocal(M[:, 0, 0]*M[:, 1, 1] - M[:, 0, 1]*M[:, 1, 0])\n M_inv[:, 0, 0] = M[:, 1, 1]*delta_inv\n M_inv[:, 0, 1] = -M[:, 0, 1]*delta_inv\n M_inv[:, 1, 0] = -M[:, 1, 0]*delta_inv\n M_inv[:, 1, 1] = M[:, 0, 0]*delta_inv\n return M_inv", "def solve_rigid_transform(X, Y, debug=True):\n assert X.shape[0] == Y.shape[0] >= 3\n assert X.shape[1] == Y.shape[1] == 3\n A = X.T # (3,N)\n B = Y.T # (3,N)\n\n # Look for Inge Soderkvist's solution online if confused.\n meanA = np.mean(A, axis=1, keepdims=True)\n meanB = np.mean(B, axis=1, keepdims=True)\n A = A - meanA\n B = B - meanB\n covariance = B.dot(A.T)\n U, sigma, VH = np.linalg.svd(covariance) # VH = V.T, i.e. numpy transposes it for us.\n\n V = VH.T\n D = np.eye(3)\n D[2,2] = np.linalg.det( U.dot(V.T) )\n R = U.dot(D).dot(V.T)\n t = meanB - R.dot(meanA)\n RB_matrix = np.concatenate((R, t), axis=1)\n\n #################\n # SANITY CHECKS #\n #################\n\n print(\"\\nBegin debug prints for rigid transformation from A to B:\")\n print(\"meanA:\\n{}\\nmeanB:\\n{}\".format(meanA, meanB))\n print(\"Rotation R:\\n{}\\nand R^TR (should be identity):\\n{}\".format(R, (R.T).dot(R)))\n print(\"translation t:\\n{}\".format(t))\n print(\"RB_matrix:\\n{}\".format(RB_matrix))\n\n # Get residual to inspect quality of solution. Use homogeneous coordinates for A.\n # Also, recall that we're dealing with (3,N) matrices, not (N,3).\n # In addition, we don't want to zero-mean for real applications.\n A = X.T # (3,N)\n B = Y.T # (3,N)\n\n ones_vec = np.ones((1, A.shape[1]))\n A_h = np.concatenate((A, ones_vec), axis=0)\n B_pred = RB_matrix.dot(A_h)\n assert B_pred.shape == B.shape\n\n # Careful! 
Use raw_errors for the RF, but it will depend on pred-targ or targ-pred.\n raw_errors = B_pred - B # Use pred-targ, of shape (3,N)\n l2_per_example = np.sum((B-B_pred)*(B-B_pred), axis=0)\n frobenius_loss = np.mean(l2_per_example)\n\n if debug:\n print(\"\\nInput, A.T:\\n{}\".format(A.T))\n print(\"Target, B.T:\\n{}\".format(B.T))\n print(\"Predicted points:\\n{}\".format(B_pred.T))\n print(\"Raw errors, B-B_pred:\\n{}\".format((B-B_pred).T))\n print(\"Mean abs error per dim: {}\".format( (np.mean(np.abs(B-B_pred), axis=1))) )\n print(\"Residual (L2) for each:\\n{}\".format(l2_per_example.T))\n print(\"loss on data: {}\".format(frobenius_loss))\n print(\"End of debug prints for rigid transformation.\\n\")\n\n assert RB_matrix.shape == (3,4)\n return RB_matrix", "def get_rotation_matrix2d(center: Tensor, angle: Tensor, scale: Tensor) -> Tensor:\n if not isinstance(center, Tensor):\n raise TypeError(f\"Input center type is not a Tensor. Got {type(center)}\")\n\n if not isinstance(angle, Tensor):\n raise TypeError(f\"Input angle type is not a Tensor. Got {type(angle)}\")\n\n if not isinstance(scale, Tensor):\n raise TypeError(f\"Input scale type is not a Tensor. Got {type(scale)}\")\n\n if not (len(center.shape) == 2 and center.shape[1] == 2):\n raise ValueError(f\"Input center must be a Bx2 tensor. Got {center.shape}\")\n\n if not len(angle.shape) == 1:\n raise ValueError(f\"Input angle must be a B tensor. Got {angle.shape}\")\n\n if not (len(scale.shape) == 2 and scale.shape[1] == 2):\n raise ValueError(f\"Input scale must be a Bx2 tensor. Got {scale.shape}\")\n\n if not (center.shape[0] == angle.shape[0] == scale.shape[0]):\n raise ValueError(\n \"Inputs must have same batch size dimension. Got center {}, angle {} and scale {}\".format(\n center.shape, angle.shape, scale.shape\n )\n )\n\n if not (center.device == angle.device == scale.device) or not (center.dtype == angle.dtype == scale.dtype):\n raise ValueError(\n \"Inputs must have same device Got center ({}, {}), angle ({}, {}) and scale ({}, {})\".format(\n center.device, center.dtype, angle.device, angle.dtype, scale.device, scale.dtype\n )\n )\n\n shift_m = eye_like(3, center)\n shift_m[:, :2, 2] = center\n\n shift_m_inv = eye_like(3, center)\n shift_m_inv[:, :2, 2] = -center\n\n scale_m = eye_like(3, center)\n scale_m[:, 0, 0] *= scale[:, 0]\n scale_m[:, 1, 1] *= scale[:, 1]\n\n rotat_m = eye_like(3, center)\n rotat_m[:, :2, :2] = angle_to_rotation_matrix(angle)\n\n affine_m = shift_m @ rotat_m @ scale_m @ shift_m_inv\n return affine_m[:, :2, :] # Bx2x3", "def transform2h(self, x, y, m):\n A = torch.matmul(m, torch.stack([x, y, torch.ones(len(x))]))\n xt = A[0, :] / A[2, :]\n yt = A[1, :] / A[2, :]\n return xt, yt", "def two_bs2x4_transform_opt(t1, r1, t2, r2, input_state):\n size = len(input_state)\n out = np.zeros((size,) * 4, dtype=complex)\n\n def coef(k1, k2, k3, k4):\n return t1 ** k2 * (1j * r1) ** k1 * t2 ** k4 * (1j * r2) ** k3 / (factorial(k1) * factorial(k2) * factorial(k3) * factorial(k4))\n\n # index 'i' = (m,n,k,l)\n for i in np.ndindex(size, size, size, size):\n if i[2] <= i[0] and i[3] <= i[1] and i[0] + i[1] < size:\n out[i[2], i[0] - i[2], i[3], i[1] - i[3]] = coef(i[2], i[0] - i[2], i[3], i[1] - i[3]) * input_state[i[0], i[1]] * factorial(i[0]) * factorial(i[1])\n\n return out", "def translation_3D(img, trans_x, trans_y, trans_z, cval=0.):\n \n if trans_x > 0:\n img[trans_x:,...] = img[:-trans_x,...] \n img[:trans_x,...] = cval\n elif trans_x < 0:\n img[:trans_x,...] = img[-trans_x:,...] \n img[trans_x:,...] 
= cval\n \n if trans_y > 0:\n img[:,trans_y:,:,:] = img[:,:-trans_y,:,:] \n img[:,:trans_y,:,:] = cval\n elif trans_y < 0:\n img[:,:trans_y,:,:] = img[:,-trans_y:,:,:] \n img[:,trans_y:,:,:] = cval\n \n if trans_z > 0:\n img[...,trans_z:,:] = img[...,:-trans_z,:] \n img[...,:trans_z,:] = cval\n elif trans_z < 0:\n img[...,:trans_z,:] = img[...,-trans_z:,:] \n img[...,trans_z:,:,:] = cval\n \n return img", "def Translate(*args, **kwargs):\n return _gdi_.GraphicsMatrix_Translate(*args, **kwargs)", "def two_bs2x4_transform(t1, r1, t2, r2, input_state):\n size = len(input_state)\n output_state = np.zeros((size,) * 4, dtype=complex)\n for m in range(size):\n for n in range(size):\n\n for k in range(m + 1):\n for l in range(n + 1):\n # channels indexes\n ind1 = k\n ind2 = m - k\n ind3 = l\n ind4 = n - l\n coeff = input_state[m, n] * t1**(m - k) * (1j*r1)**k * t2**(n - l) * (1j*r2)**l * factorial(m) * factorial(n) / (factorial(k) * factorial(m - k) * factorial(l) * factorial(n - l))\n output_state[ind1, ind2, ind3, ind4] = output_state[ind1, ind2, ind3, ind4] + coeff\n\n return output_state", "def transform(self, x, y):\n # return self.transform_2D(x, y)\n return self.transform_perspective(x, y)", "def o2transform(self, x, w):\n\n o2t = lambda x, w: K.dot(w, K.dot(x, K.transpose(w)))\n return tf.map_fn(o2t, [x, w])", "def batch_vTAv(A: np.ndarray, v: np.ndarray) -> np.ndarray:\n\n \"\"\" Faster than\n Av = np.matmul(A, v[...,:,None]) # [B, X, 1]\n return np.matmul(v[...,None,:], Av).squeeze((-2, -1)) # [B]\n \"\"\"\n\n return np.einsum(\"...k,...kl,...l->...\", v, A, v)", "def transform(tvec1, rvec1, tvec2, rvec2):\n op = localToGlobal(np.squeeze(tvec2), np.squeeze(rvec2))\n tvec3 = []\n for tvec in tvec1:\n #tvec = tvec.squeeze()\n tvec3.append(np.matmul(op, tvec))\n tvec3 = np.array(tvec3)\n return tvec3", "def estimate_translation(points1, points2):\n xs = points1[:,0]\n ys = points1[:,1]\n x2s = points2[:,0]\n y2s = points2[:,1]\n N = len(xs)\n \n # build b\n b = np.empty((N+N, 1))\n b[::2,0] = xs\n b[1::2,0] = ys\n \n # build A\n A = np.empty((N+N, 3))\n A[::2, 0] = x2s\n A[1::2,0] = y2s\n A[::2, 1] = np.ones(N)\n A[1::2, 1] = np.zeros(N)\n A[::2, 2] = np.zeros(N)\n A[1::2, 2] = np.ones(N)\n \n A = np.linalg.lstsq(A, b)[0][:,0]\n M = [[1, 0, A[1]],\n [0, 1, A[2]]]\n return (A[0], A[1], A[2], np.array(M))", "def translate(dx,dy,Mat):\r\n # MT is the Translation (3 X 3) Matrix\r\n MT=[[1,0,dx],[0,1,dy],[0,0,1]]\r\n Translated= Multiply(MT,Mat)\r\n # Translated[0][0] is the updated x coordinate\r\n # Translated[1][0] is the updated y coordinate\r\n return Translated[0][0],Translated[1][0],Translated[2][0]", "def transl(x, y, z):\n displace_vector = [[x],\n [y],\n [z]]\n return np.matrix(displace_vector)", "def _apply_transformations(structure, rotations, translations):\n # Additional first dimension for 'structure.repeat()'\n assembly_coord = np.zeros((len(rotations),) + structure.coord.shape)\n\n # Apply corresponding transformation for each copy in the assembly\n for i, (rotation, translation) in enumerate(zip(rotations, translations)):\n coord = structure.coord\n # Rotate\n coord = matrix_rotate(coord, rotation)\n # Translate\n coord += translation\n assembly_coord[i] = coord\n\n return repeat(structure, assembly_coord)", "def _z2matmul(self, left, right):\n prod = np.mod(np.dot(left, right), 2)\n return prod", "def get_4x4_transform(scale_x, scale_y, trans_x, trans_y, trans_z):\r\n transform = [[scale_x, 0.0, 0.0, trans_x],\r\n [0.0, scale_y, 0.0, trans_y],\r\n [0.0, 0.0, 1.0, trans_z],\r\n 
[0.0, 0.0, 0.0, 1.0]]\r\n return transform", "def rigid_transform_3d(xs,ys):\n assert xs.shape == ys.shape\n assert xs.shape[0] == 3, 'The points must be of dimmensionality 3'\n\n # find centroids and H\n x_centroid = np.mean(xs, axis=1)[:, np.newaxis]\n y_centroid = np.mean(ys, axis=1)[:, np.newaxis]\n \n H = (xs - x_centroid)@(ys - y_centroid).T\n\n # find rotation\n U, S, Vt = np.linalg.svd(H)\n rotation = [email protected]\n\n # handling reflection\n if np.linalg.det(rotation) < 0:\n Vt[2, :] *= -1\n rotation = np.dot(Vt.T, U.T)\n \n # find translation\n translation = y_centroid - rotation@x_centroid\n \n return translation, rotation" ]
[ "0.65614295", "0.634234", "0.61342865", "0.61295545", "0.61271703", "0.59918904", "0.5945617", "0.5937629", "0.5931673", "0.59223235", "0.5891538", "0.58714736", "0.5866033", "0.58649784", "0.58559966", "0.5835464", "0.5825817", "0.58026475", "0.5768168", "0.575795", "0.5744753", "0.5701919", "0.56939435", "0.56847537", "0.5679792", "0.5620446", "0.5610276", "0.55997044", "0.55902857", "0.55864346", "0.55767435", "0.55688345", "0.5565349", "0.55588764", "0.55565774", "0.55440617", "0.5542827", "0.55395", "0.5529177", "0.55218875", "0.55043364", "0.5492529", "0.54862", "0.5481359", "0.5471187", "0.5471187", "0.5470529", "0.5470529", "0.5469175", "0.5460456", "0.54527116", "0.54435337", "0.5439173", "0.5433005", "0.54272115", "0.54128236", "0.54107213", "0.540762", "0.54030246", "0.53978175", "0.5395745", "0.53893244", "0.53821987", "0.53804195", "0.5371596", "0.53561425", "0.53515184", "0.5351028", "0.5347509", "0.5338666", "0.5337644", "0.53340507", "0.53336066", "0.5332591", "0.5330443", "0.53278005", "0.532304", "0.53145146", "0.5314213", "0.5310391", "0.53097415", "0.53078336", "0.5304553", "0.5302978", "0.53003705", "0.5297984", "0.52976555", "0.52917415", "0.52877676", "0.5285267", "0.5285039", "0.5282912", "0.52769226", "0.5272112", "0.5267057", "0.5262855", "0.52626705", "0.52594995", "0.5254268", "0.5253577" ]
0.5372871
64
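For reference, a minimal sketch (not part of the dataset record) of how the 4x4 scale/translate matrix built by the `get_4x4_transform` negative above would be applied to a point in homogeneous coordinates; NumPy is assumed and the numbers are illustrative only:

import numpy as np

# Apply the 4x4 matrix from get_4x4_transform to one homogeneous point.
T = np.array(get_4x4_transform(scale_x=2.0, scale_y=0.5,
                               trans_x=10.0, trans_y=-5.0, trans_z=1.0))
p = np.array([3.0, 4.0, 0.0, 1.0])  # (x, y, z, 1)
x, y, z, _ = T @ p                  # -> (16.0, -3.0, 1.0)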
Binary mask from a cv2-styled contour (gets filled)
def make_mask(shape, contour):
    mask = np.zeros(shape, np.int32)
    cv2.drawContours(mask, [contour], 0, (255), -1)
    return mask
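A minimal usage sketch for the record's `make_mask` (not part of the record itself; assumes OpenCV 4's two-value `findContours` return and a toy input image):

import cv2
import numpy as np

# Toy input: a filled white square on a black background.
img = np.zeros((100, 100), np.uint8)
cv2.rectangle(img, (20, 20), (80, 80), 255, -1)

# Find the external contour and rebuild a filled binary mask from it.
contours, _ = cv2.findContours(img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
mask = make_mask(img.shape, contours[0])  # int32 mask, 255 inside the contour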
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_contour_features(mask,selectcell=\"centered\"):\r\n \r\n #binarize image (everything above 0 becomes 1)\r\n mask = np.clip(mask,a_min=0,a_max=1)\r\n\r\n #for contours, dont use RETR_TREE, but RETR_EXTERNAL as we are not interested in internal objects\r\n contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\r\n contours = list(contours)\r\n \r\n #in case there is no contour found, add a dummy contour\r\n if len(contours)==0:\r\n contours = [np.array([[[0, 0]],[[0, 1]],[[1, 1]],[[1, 0]]])] #generate a dummy contour\r\n\r\n #Sort contours, longest first\r\n contours.sort(key=len,reverse=True)\r\n contours = [c for c in contours if len(c)>4] #proper contour should have at least 5 points\r\n hulls = [cv2.convexHull(contour,returnPoints=True) for contour in contours]\r\n\r\n mu_origs = [cv2.moments(contour) for contour in contours]\r\n mu_hulls = [cv2.moments(hull) for hull in hulls]\r\n\r\n area_origs = [mu_orig[\"m00\"] for mu_orig in mu_origs]\r\n area_hulls = [mu_hull[\"m00\"] for mu_hull in mu_hulls]\r\n\r\n #drop events where area is zero\r\n hulls = [hulls[i] for i in range(len(hulls)) if area_origs[i]>0] \r\n contours = [contours[i] for i in range(len(contours)) if area_origs[i]>0]\r\n mu_origs = [mu_origs[i] for i in range(len(mu_origs)) if area_origs[i]>0]\r\n mu_hulls = [mu_hulls[i] for i in range(len(mu_hulls)) if area_origs[i]>0]\r\n area_hulls = [area_hulls[i] for i in range(len(area_hulls)) if area_origs[i]>0]\r\n area_origs = [area_origs[i] for i in range(len(area_origs)) if area_origs[i]>0]\r\n \r\n \r\n pos_x = [int(mu_orig['m10']/mu_orig['m00']) for mu_orig in mu_origs]\r\n pos_y = [int(mu_orig['m01']/mu_orig['m00']) for mu_orig in mu_origs]\r\n\r\n \r\n if selectcell == \"smooth\":\r\n #compute the area ratio (roughness of contour)\r\n area_ratio = np.array(area_hulls)/np.array(area_origs)\r\n #get the contour with minimum roughness (smooth contour)\r\n sorter = np.argsort(area_ratio) #smallest first\r\n\r\n if selectcell == \"centered\":\r\n #select contour that is closest to the center of the image. 
\r\n #In iPAC, cells are usually in the center.\r\n mid_x,mid_y = mask.shape[0]/2,mask.shape[1]/2 #middle of the image\r\n BB = [cv2.boundingRect(c) for c in contours] #get a bounding box around the object\r\n distances = [np.sqrt((mid_x-bb[0])**2 + (mid_y-bb[1])**2) for bb in BB]\r\n sorter = np.argsort(distances) #smallest first\r\n \r\n #sort values with respect to chosen metric (area_ratio or distance)\r\n contours = [contours[s] for s in sorter]\r\n hulls = [hulls[s] for s in sorter]\r\n pos_x = [pos_x[s] for s in sorter]\r\n pos_y = [pos_y[s] for s in sorter]\r\n mu_origs = [mu_origs[s] for s in sorter]\r\n area_origs = [area_origs[s] for s in sorter]\r\n area_hulls = [area_hulls[s] for s in sorter]\r\n \r\n # draw mask of the chosen contour\r\n mask = np.zeros_like(mask)\r\n cv2.drawContours(mask,contours,0,1,cv2.FILLED)# produce a contour that is filled inside\r\n\r\n hull = hulls[0]#[0:n_contours]\r\n pos_x = pos_x[0]\r\n pos_y = pos_y[0] \r\n mu_orig = mu_origs[0]#[0:n_contours]\r\n area_orig = area_origs[0]#[0:n_contours]\r\n area_hull = area_hulls[0]#[0:n_contours]\r\n \r\n if area_orig>0:\r\n area_ratio = area_hull/area_orig\r\n else:\r\n area_ratio = np.nan\r\n\r\n arc = cv2.arcLength(hull, True) \r\n circularity = 2.0 * np.sqrt(np.pi * mu_orig[\"m00\"]) / arc\r\n\r\n\r\n dic = {\"mask\":mask,\"pos_x\":pos_x,\"pos_y\":pos_y,\"area_orig\":area_orig,\"area_hull\":area_hull,\\\r\n \"area_ratio\":area_ratio,\"circularity\":circularity}\r\n return dic", "def mask(self):\n mask = np.zeros((self.height, self.width))\n pts = [\n np.array(anno).reshape(-1, 2).round().astype(int)\n for anno in self.segmentation\n ]\n mask = cv2.fillPoly(mask, pts, 1)\n return mask", "def sanitize_mask(orig_x, orig_y, mask):\n contours, hierarchy = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n\n # Draw contours:\n cv2.drawContours(mask, contours, 0, (0, 255, 0), 2)\n # Calculate image moments of the detected contour\n num_objects = (len(contours))\n #threshold\n threshold = 3\n\n center_list = []\n # print(num_objects)\n if num_objects > 1:\n for item in range(num_objects):\n M = cv2.moments(contours[item])\n try:\n center_x = round(M['m10'] / M['m00'])\n center_y = round(M['m01'] / M['m00'])\n center_list.append([center_y , center_x ])\n except:\n pass\n\n # initialize retmask\n retmask = mask\n if num_objects > 1:\n for x, y in center_list:\n if orig_x - threshold <= x <= orig_x + threshold and orig_y - threshold <= y <= orig_y + threshold:\n pass\n else:\n def dfs_removal(px , py, mask):\n R = len(mask)\n C = len(mask[0])\n if mask[px][py ] != 255: \n return\n mask[px][py] = 0\n if 0 <= px - 1 and mask[px - 1][py ] == 255: dfs_removal(px - 1 , py , mask)\n if px + 1 < R and mask[px + 1][py ] == 255: dfs_removal(px + 1 , py , mask)\n if 0 <= py - 1 and mask[px][py - 1] == 255: dfs_removal(px, py -1 , mask)\n if py + 1 < C and mask[px][py + 1] == 255: dfs_removal(px, py + 1 , mask)\n\n dfs_removal(x,y, mask)\n\n return retmask", "def as_boolean_mask(self):\n bbox = self.bbox()\n zs = np.unique([c.image_z_position for c in self.contours])\n z_to_index = dict(zip(zs,range(len(zs))))\n\n # Get dimensions, initialize mask.\n nx,ny = np.diff(bbox[:2], axis=1).astype(int) + 1\n nx = int(nx); ny = int(ny)\n nz = int(zs.shape[0])\n mask = np.zeros((nx,ny,nz), dtype=np.bool)\n\n # We check if these points are enclosed within each contour \n # for a given slice. 
`test_points` is a list of image coordinate \n # points, offset by the bounding box.\n test_points = bbox[:2,0] + np.c_[ np.where(~mask[:,:,0]) ]\n\n # First we \"turn on\" pixels enclosed by inclusion contours.\n for contour in self.contours:\n if contour.inclusion:\n zi = z_to_index[contour.image_z_position]\n contour_matrix = contour.to_matrix()[:,:2]\n\n # Turn the contour closed if it's not.\n if (contour_matrix[0] != contour_matrix[-1]).all():\n contour_matrix = np.append(contour_matrix,\n contour_matrix[0].reshape(1,2),\n axis=0)\n\n # Create path object and test all pixels\n # within the contour's bounding box.\n path = mplpath.Path(contour_matrix, closed=True)\n contains_pts = path.contains_points(test_points)\n mask[:,:,zi] = contains_pts.reshape(mask.shape[:2])\n\n # Second, we \"turn off\" pixels enclosed by exclusion contours.\n for contour in self.contours:\n if not contour.inclusion:\n zi = z_to_index[contour.image_z_position]\n contour_matrix = contour.to_matrix()[:,:2]\n\n # Turn the contour closed if it's not.\n if (contour_matrix[0] != contour_matrix[-1]).all():\n contour_matrix = np.append(contour_matrix,\n contour_matrix[0].reshape(1,2),\n axis=0)\n\n path = mplpath.Path(contour_matrix, closed=True)\n not_contains_pts = ~path.contains_points(test_points)\n not_contains_pts = not_contains_pts.reshape(mask.shape[:2])\n mask[:,:,zi] = np.logical_and(mask[:,:,zi], not_contains_pts)\n\n # The first and second axes have to \n # be swapped because of the reshape.\n return mask.swapaxes(0,1), bbox[[1,0,2]]", "def get_binary_mask(self,index):\n mask = self.load_mask_png(index)\n (rows,cols) = np.where(mask>0)[0:2] #pixels in mask disregarding the color\n new_mask = np.zeros(shape=mask.shape[0:2], dtype=np.uint8)\n new_mask[(rows,cols)] = 255\n return new_mask", "def get_contour(self, mask):\n\n assert mask.ndim == 2\n assert mask.min() == 0\n assert mask.max() == 1\n contours, hierarchy = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n assert len(contours) == 1, \"Too many contours in this mask!\"\n contour = contours[0]\n # logging.debug(\"Returning {} fit contours over mask pixels\".format(len(contours)))\n return contour", "def mask(self):\n\n mask = np.zeros(shape=(self._info.height, self._info.width), dtype=np.uint8)\n\n self.draw(image=mask, color=constants.COLOR_WHITE_MONO)\n\n mask_with_border = np.pad(mask, 1, 'constant', constant_values=255)\n\n cv2.floodFill(image=mask,\n mask=mask_with_border,\n seedPoint=(int(self.middle_point[0]), int(self.middle_point[1])),\n newVal=constants.COLOR_WHITE_MONO)\n\n return mask", "def find_contours(mask):\n _, contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_TC89_KCOS)\n return contours", "def get_biomass(binary_mask):\n\n white_pixels = cv2.countNonZero(binary_mask)\n return white_pixels", "def emit_mask_contour(self):\r\n contours = find_contours(self.final_mask, 0.5)\r\n \r\n sig = [contours, self.fillContourButton.isChecked(), self.thicknessSpinBox.value(), self.invertMaskButton.isChecked()]\r\n \r\n self.signal_DMDcontour.emit(sig)", "def apply_mask_to_image(img, mask):\n img_size = img.shape[0]\n mask = cv2.resize(mask, dsize=(img_size, img_size))\n\n # Find contour of the mask\n imgray = mask\n ret,thresh = cv2.threshold(imgray, 127, 255, 0)\n contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n\n # Draw contours on image\n segmented_img = cv2.drawContours(img, contours, -1, (0,255,0), 3)\n\n return segmented_img", "def find_contours(mask):\n\n cnts = 
cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n cnts = imutils.grab_contours(cnts)\n return cnts", "def mask_label_contour(image, seg):\n return sitk.Mask(image, sitk.LabelContour(seg+1)==0)", "def __mask_region(self, img, vertices):\n\n mask = np.zeros_like(img) \n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n cv2.fillConvexPoly(mask, vertices, ignore_mask_color)\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def mask_creation(image, mask_path, image_index):\n # convert image to hsv color space\n image = cv2.imread(image)\n \n im_hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)\n h, s, v = cv2.split(im_hsv)\n\n # compute the mean value of hue, saturation and value for the border of the image\n hue_mean_border = (np.mean(h[0, :]) + np.mean(h[:, 0]) + np.mean(h[-1, :]) + np.mean(h[:, -1]))/4\n saturation_mean_border = (np.mean(s[0, :]) + np.mean(s[:, 0]) + np.mean(s[-1, :]) + np.mean(s[:, -1]))/4\n value_mean_border = (np.mean(v[0, :]) + np.mean(v[:, 0]) + np.mean(v[-1, :]) + np.mean(v[:, -1]))/4\n\n # compute lower and upper limits for the mask\n # we need to find the good limits to segment the background by color\n lower_hue = (hue_mean_border - 40)\n upper_hue = (hue_mean_border + 40)\n lower_saturation = (saturation_mean_border - 20)\n upper_saturation = (saturation_mean_border + 20)\n lower_value = (value_mean_border - 200)\n upper_value = (value_mean_border + 200)\n\n lower_limit = np.array([lower_hue, lower_saturation, lower_value])\n upper_limit = np.array([upper_hue, upper_saturation, upper_value])\n\n # create mask\n mask = cv2.inRange(im_hsv, lower_limit, upper_limit)\n mask = cv2.bitwise_not(mask)\n\n # resize masks\n n_mask, m_mask = mask.shape[0], mask.shape[1]\n mask = cv2.resize(mask, (1000, 1000)) \n\n # apply mask to find contours\n mask = np.uint8(mask)\n \n contours, hierarchy = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n\n # create new mask with the contours found\n mask_contours = cv2.fillPoly(mask, contours, [255, 255, 255])\n\n # Apply morphological filter to clean\n kernel = np.ones((9, 9), np.float32)/25\n mask_erode = cv2.morphologyEx(mask_contours, cv2.MORPH_ERODE, kernel, iterations = 1)\n mask_dilate = cv2.morphologyEx(mask_erode, cv2.MORPH_DILATE, kernel, iterations = 1)\n\n # resize masks to original size\n new_mask = cv2.resize(mask_dilate, (m_mask, n_mask))\n\n # save mask image inside the same folder as the image\n # cv2.imwrite(mask_path + str(image_index).zfill(2) + \"_mask.png\", new_mask)\n\n return new_mask", "def region_of_interest(self,img):\r\n #defining a blank mask\r\n mask = np.zeros_like(img) \r\n #checking number of image channel(color/grayscale) and applying mask\r\n if len(img.shape) > 2:\r\n ignore_mask_color = (255,255,255)\r\n else:\r\n ignore_mask_color = 255\r\n #filling color to pixels inside the polygon \r\n cv2.fillPoly(mask, self.vertices_img, ignore_mask_color)\r\n #image where mask pixels are nonzero\r\n masked_image = cv2.bitwise_and(img, mask)\r\n #cv2.imshow('',masked_image)\r\n return masked_image", "def create_binary_image(img, s_thresh=(100, 255), sx_thresh=(10, 200), dir_thresh=(np.pi/6, np.pi/2), c_thresh=50):\n # We use a combination of gradient and direction threshold\n # convert to gray scale\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n # Compute the combined threshold\n sobel_x = sobel_mask(gray, sx_thresh)\n 
dir_gradient = dir_mask(gray, dir_thresh)\n combined = ((sobel_x == 1) & (dir_gradient == 1))\n\n # Color threshold in RGB color space\n # This helps to detect yellow lanes better, which is a significant issue in the video \n G = img[:,:,1]\n R = img[:,:,2]\n r_g = (R > c_thresh) & (G > c_thresh)\n \n # color channel thresholds\n hls = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)\n S = hls[:,:,2]\n L = hls[:,:,1]\n \n # S channel performs well for detecting bright yellow and white lanes\n s = (S > s_thresh[0]) & (S <= s_thresh[1])\n l = (L > s_thresh[0]) & (L <= s_thresh[1])\n\n # combine all the thresholds\n # The pixel we want is either white or yellow\n color_combined = np.zeros_like(R)\n color_combined[(r_g & l) & (s | combined)] = 1\n \n # apply the region of interest mask\n # This helps to remove the shadow outside the lane\n mask = np.zeros_like(color_combined)\n h, w = img.shape[0], img.shape[1]\n polygon_vertice = np.array([[0,h-1], [w//2, h//2], [w-1, h-1]], dtype=np.int32)\n cv2.fillPoly(mask, [polygon_vertice], 1)\n binary = cv2.bitwise_and(color_combined, mask)\n \n return binary", "def _get_mask(self, anno, idx):\n coco = self.coco\n img_info = coco.loadImgs(self.img_ids[idx])[0]\n\n m = np.zeros((img_info['height'], img_info['width']), dtype=np.float32)\n\n for obj in anno:\n if 'segmentation' in obj:\n if obj['iscrowd']:\n rle = pycocotools.mask.frPyObjects(obj['segmentation'],\n img_info['height'],\n img_info['width'])\n m += pycocotools.mask.decode(rle)\n elif obj['num_keypoints'] == 0:\n rles = pycocotools.mask.frPyObjects(obj['segmentation'],\n img_info['height'],\n img_info['width'])\n for rle in rles:\n m += pycocotools.mask.decode(rle)\n\n return m < 0.5", "def get_mask(self, img):\n raise NotImplementedError()", "def create_binary_masks(image_path):\n mask = cv2.imread(image_path, cv2.IMREAD_ANYDEPTH)\n size = mask.shape\n for row_pixel in range(0, size[0]):\n for column_pixel in range(0, size[1]):\n if mask[row_pixel, column_pixel] == 0:\n mask[row_pixel, column_pixel] = 65535\n\n else:\n mask[row_pixel, column_pixel] = 0\n\n cv2.imwrite(image_path[:-4]+'_binary.png', mask)", "def get_rectangles_mask(self, thresh: np.ndarray) -> np.ndarray:\r\n contours = cv.findContours(thresh, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)[0]\r\n mask = np.zeros(thresh.shape, np.uint8)\r\n good_contours = sorted(\r\n [cnt for cnt in contours if 100000 < cv.contourArea(cnt) < 200000],\r\n key=cv.contourArea,\r\n )\r\n\r\n setattr(self, \"contour1\", good_contours[0])\r\n setattr(\r\n self,\r\n \"contour2\",\r\n good_contours[1]\r\n if cv.pointPolygonTest(\r\n good_contours[1], tuple(good_contours[0][0][0]), False\r\n )\r\n < 0\r\n else good_contours[2],\r\n )\r\n\r\n cv.drawContours(mask, [self.contour1], 0, 255, -1)\r\n cv.drawContours(mask, [self.contour2], 0, 255, -1)\r\n\r\n return mask", "def preprocessing(self, img):\n [a, contours, c] = cv2.findContours(img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n return contours", "def find_contour(ctx: Context):\n cv2.copyTo(ctx.filter_image, np.ones_like(ctx.temp_image1), ctx.temp_image1)\n contours, _ = cv2.findContours(ctx.temp_image1, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n\n # take the 5 biggest areas\n contours = sorted(contours, key=lambda c: math.fabs(cv2.contourArea(c)), reverse=True)[:5]\n\n # approximate contours with poly line\n ctx.contours = [cv2.approxPolyDP(c, 2, True) for c in contours]", "def compute_contour_binary_masks(\n contour1: np.ndarray,\n contour2: np.ndarray,\n max_size: int = DEFAULT_MAX_CONTOUR_MASK_SIZE,\n) -> 
typing.Tuple[np.ndarray, np.ndarray]:\n points = np.concatenate([contour1, contour2], axis=0)\n offset = points.min(axis=0)\n points, contour1, contour2 = [v - offset for v in [points, contour1, contour2]]\n scale = min(max_size / points.max(axis=0).min(), 1)\n if scale < 1:\n points, contour1, contour2 = [v * scale for v in [points, contour1, contour2]]\n w, h = points.max(axis=0).astype(\"int32\")\n im1, im2 = [\n cv2.drawContours(\n np.zeros((h, w), dtype=\"uint8\"),\n contours=(box[np.newaxis]).round().astype(\"int32\"),\n color=255,\n thickness=-1,\n contourIdx=0,\n )\n > 0\n for box in [contour1, contour2]\n ]\n return im1, im2", "def inflate_mask(mask):\n kernel = np.ones((12, 12), np.uint8)\n return cv2.dilate(mask, kernel, 1)", "def _draw_contour(self, img):\n if self.mask is None or self.contour_width == 0:\n return img\n\n mask = self._get_bolean_mask(self.mask) * 255\n contour = Image.fromarray(mask.astype(np.uint8))\n contour = contour.resize(img.size)\n contour = contour.filter(ImageFilter.FIND_EDGES)\n contour = np.array(contour)\n\n # make sure borders are not drawn before changing width\n contour[[0, -1], :] = 0\n contour[:, [0, -1]] = 0\n\n # use gaussian to change width, divide by 10 to give more resolution\n radius = self.contour_width / 10\n contour = Image.fromarray(contour)\n contour = contour.filter(ImageFilter.GaussianBlur(radius=radius))\n contour = np.array(contour) > 0\n contour = np.dstack((contour, contour, contour))\n\n # color the contour\n ret = np.array(img) * np.invert(contour)\n if self.contour_color != 'black':\n color = Image.new(img.mode, img.size, self.contour_color)\n ret += np.array(color) * contour\n\n return Image.fromarray(ret)", "def draw_contours(self, image, maskImg):\r\n # Required variables..\r\n x, y, width, height = 0, 0, 0, 0\r\n # Find contours..\r\n contours, hierarchy = cv2.findContours(image=maskImg, mode=cv2.RETR_EXTERNAL, method=cv2.CHAIN_APPROX_NONE) # Playable Parameters..\r\n # Draw the contours..\r\n for contour in contours:\r\n # Calculate the area of the contour, so can remove unnecessary contours..\r\n area = cv2.contourArea(contour=contour)\r\n if area > 3000: # Playable adjustment..!! Found Good as 3000 for current light condition.. 
change this if light condition changes..\r\n # Draw the contours to the image -- actual frame..\r\n if self.debug_mode:\r\n cv2.drawContours(image=image, contours=contour, contourIdx=-1, color=(255, 255, 0), thickness=4)\r\n # Find the perimeter of the markers detected...\r\n perimeter = cv2.arcLength(curve=contour, closed=True)\r\n # Approximating/Finding the corners of the image from the obtained corners..\r\n approx_corners = cv2.approxPolyDP(curve=contour, epsilon=0.02 * perimeter, closed=True)\r\n # Find the bounding box rectangle for the approximated corners..\r\n x, y, width, height = cv2.boundingRect(approx_corners)\r\n # Return the values with which a rectangle can be drawn..\r\n return x, y, width, height", "def get_building_contour(current_building_mask):\n ret, threshed = cv.threshold(current_building_mask, 0, 2 ** 16, cv.THRESH_BINARY)\n compressed = threshed.astype(np.uint8)\n current_building_contour, hierarchy = cv.findContours(compressed, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_NONE)\n return current_building_contour, hierarchy", "def get_regions_mask(self, input):", "def get_mask(self, anno, img_info) -> np.ndarray:\n m = np.zeros((img_info[\"height\"], img_info[\"width\"]), dtype=np.float32)\n\n for obj in anno:\n if obj[\"iscrowd\"]:\n rle = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n m += mask\n elif obj[\"num_keypoints\"] == 0:\n rles = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n for rle in rles:\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n\n m += mask\n\n return (m < 0.5).astype(np.float32)", "def __mask(input, mask):\n return cv2.bitwise_and(input, input, mask=mask)", "def find_original_contours(self):\r\n\r\n # Convert to gray, threshold and invert the image. Also save a thresholded but non-inverted image copy\r\n gray = cv2.cvtColor(self.image, cv2.COLOR_BGR2GRAY)\r\n self.thresh_invert = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV,\r\n int(self.cell_size), 5)\r\n self.thresh_orig = 255 - self.thresh_invert\r\n\r\n # Find the contours of the image. Each contour should correspond to a cell\r\n orig_contours = cv2.findContours(self.thresh_invert, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\r\n orig_contours = orig_contours[0] if len(orig_contours) == 2 else orig_contours[1]\r\n for block in orig_contours:\r\n area = cv2.contourArea(block)\r\n\r\n # If the contours are not too large, we draw them over the image to remove the digits in the grid\r\n if area < self.min_cell_size:\r\n cv2.drawContours(self.thresh_invert, [block], -1, (0, 0, 0), -1)", "def region_of_interest(self, img):\n # defining a blank mask to start with\n mask = np.zeros_like(img)\n\n # defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 
3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n\n # filling pixels inside the polygon defined by \"vertices\" with the fill color\n cv2.fillPoly(mask, self.vertices, ignore_mask_color)\n\n # returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def fill_vert(self, mask):\n im_floodfill = np.copy(mask)\n im_floodfill[im_floodfill!=self.vertebra_id] = 0\n im_floodfill[im_floodfill==self.vertebra_id] = 255\n im_floodfill_copy = np.copy(im_floodfill)\n # The size needs to be 2 pixels larger than the image.\n h, w = im_floodfill.shape[:2]\n mask4mask = np.zeros((h+2, w+2), np.uint8)\n # Floodfill from point (0, 0)\n cv2.floodFill(im_floodfill, mask4mask, (0,0), 255)\n # Invert floodfilled image\n im_floodfill_inv = cv2.bitwise_not(im_floodfill)\n # Combine the two images to get the foreground.\n im_floodfill_inv = im_floodfill_inv | im_floodfill_copy\n im_floodfill_inv[im_floodfill_inv==255] = self.vertebra_id\n mask_filled = mask | im_floodfill_inv\n return mask_filled", "def get_contours(mask, threshold_area):\n contours, hierarchy = cv2.findContours(mask,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)\n\n return [x for x in contours if cv2.contourArea(x) > threshold_area], hierarchy", "def get_contours(mask, threshold_area):\n contours, hierarchy = cv2.findContours(mask,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)\n\n return [x for x in contours if cv2.contourArea(x) > threshold_area], hierarchy", "def mask(self, byclass, height, width, margin=0, figsize=(10, 10), dpi=180):\n # make ~binary mask using available classes\n style = {cls: ('k', '-') for cls in byclass}\n fig = Figure(figsize=figsize)\n fig.tight_layout(pad=0)\n fig.subplots_adjust(hspace=0, wspace=0, left=0, right=1, bottom=0, top=1)\n canvas = FigureCanvas(fig)\n ax = fig.subplots(1, 1)\n self.show_style(ax, style, byclass)\n ax.set_xlim(0 - margin, height + margin)\n ax.set_ylim(0 - margin, width + margin)\n canvas.draw()\n mask = self.figure_buffer(fig, dpi=dpi)\n mask = cv2.cvtColor(mask, cv2.COLOR_BGR2GRAY)\n # fill in the gaps via:\n # https://www.learnopencv.com/filling-holes-in-an-image-using-opencv-python-c/\n _, thresholded = cv2.threshold(mask, 220, 255, cv2.THRESH_BINARY_INV);\n floodfilled = thresholded.copy()\n h, w = thresholded.shape[:2]\n mask = np.zeros((h + 2, w + 2), np.uint8)\n cv2.floodFill(floodfilled, mask, (0, 0), 255);\n mask = cv2.bitwise_not(thresholded | cv2.bitwise_not(floodfilled))\n return mask", "def contours(self, image,debug=False):\n imgray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)\n if debug: cv2.imwrite('debug_pics/gray_scale_contour.jpg',imgray) # cv2.imshow('gray_scale_contour',imgray)\n im2, contours, hierarchy = cv2.findContours(imgray,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\n\n return contours,hierarchy", "def get_mask(data):\n # saturated CCD count\n saturation_adu = 63000\n\n mask_sat = (data[:, 20:-20] >= saturation_adu)\n\n mask_bad = np.zeros_like(data[:, 20:-20], dtype=np.int16)\n # currently no bad pixels in FOCES CCD\n\n mask = np.int16(mask_sat)*4 + np.int16(mask_bad)*2\n\n return mask", "def get_masked_scene(orig, mask, local_context_size = 80, dilation=False):\n orig_scene = orig.copy()\n mask_scene = mask.copy()\n orig_scene_no_mask = orig.copy()\n \n mask_info = np.where(mask_scene == 0) \n min_x = max(min(mask_info[0]) - local_context_size, 0)\n max_x = max(mask_info[0]) + local_context_size\n min_y = max(min(mask_info[1]) - local_context_size, 0)\n 
max_y = max(mask_info[1]) + local_context_size\n \n orig_scene = orig_scene[min_x:max_x,min_y:max_y]\n orig_scene_no_mask = orig_scene_no_mask[min_x:max_x,min_y:max_y]\n mask_scene = mask_scene[min_x:max_x,min_y:max_y]\n \n dialation_mask = np.zeros(mask_scene.shape) + 255\n \n if dilation:\n dialation_mask = cv2.dilate(255-mask_scene, np.ones((local_context_size,local_context_size)))\n \n #implot(dialation_mask)\n #plt.imshow(dialation_mask, 'gray')\n \n for x in range(mask_scene.shape[0]):\n for y in range(mask_scene.shape[1]):\n if mask_scene[x, y] == 0:\n orig_scene[x, y, :] = 0\n orig_scene_no_mask[x,y,:] = 0\n if dilation:\n if dialation_mask[x,y] == 0:\n orig_scene[x, y, :] = 0\n \n return orig_scene, mask_scene, orig_scene_no_mask, dialation_mask", "def _prepare_mask_file(mask):\n result = np.ndarray((mask.shape[0], mask.shape[1]), dtype=np.uint8)\n for i in range(mask.shape[0]):\n for j in range(mask.shape[1]):\n\n if mask[i][j] > 0:\n result[i][j] = 1\n else:\n result[i][j] = 0\n \n return result", "def fillHoles(img):\n out,contour,hierarchy = cv2.findContours(img,cv2.RETR_CCOMP,cv2.CHAIN_APPROX_NONE)\n i=0\n for cnt in contour:\n cv2.drawContours(img,contour,i,255,-1)\n i+=1\n return img", "def get_contours(image):\n assert len(image.shape) == 2, 'Image should be binary'\n contour_image = image.copy()\n contour_image[contour_image == 1] = 255\n _, contours, _ = cv2.findContours(contour_image.astype('uint8'),\n cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)\n return contours", "def detect(self, mask):\n # 1) Return Non zero indices\n det_idx = np.where(mask > 0.0)\n idx_x, idx_y = det_idx[0], det_idx[1]\n # 2) Create 1x1 box for each pixel detected.\n detections = []\n for i in range(0, len(idx_x)):\n x, y = idx_x[i], idx_y[i]\n detections.append((x, y, x+1, y+1, 1)) # x1, y1, x2, y2, area\n # 3) merge boxes\n bounding_boxes = self.bounding_boxes(detections)\n return bounding_boxes", "def fillContour(img, cnt, color = (255,255,0)):\n\tcv2.drawContours(img, [cnt], 0, color, -1)", "def region_of_interest(self, img, vertices):\r\n mask = np.zeros_like(img)\r\n # if len(img.shape) > 2:\r\n # channel_count = img.shape[2] # i.e. 
3 or 4 depending on your image\r\n # ignore_mask_color = (255,) * channel_count\r\n # else:\r\n # ignore_mask_color = 255\r\n\r\n #filling pixels inside the polygon defined by \"vertices\" with the fill color\r\n cv2.fillPoly(mask, vertices, 255)\r\n\r\n #returning the image only where mask pixels are nonzero\r\n masked_image = cv2.bitwise_and(img, mask)\r\n\r\n return masked_image", "def bound_green_object(self, img):\n\n self.x = self.y = self.w = self.h = 0\n\n # Apply the threshold to get the green parts\n masked_img = self.threshold(img)\n\n # Get contours\n img_gray = cv2.cvtColor(masked_img, cv2.COLOR_BGR2GRAY)\n _, contours, _ = cv2.findContours(img_gray, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)\n if len(contours) > 0:\n # Find the largest contour\n areas = [cv2.contourArea(c) for c in contours]\n max_index = np.argmax(areas)\n max_contour = contours[max_index]\n\n self.perimeter_size = cv2.arcLength(max_contour, True)\n\n if self.perimeter_size < MIN_PERIMETER:\n return img\n\n # Draw rectangle bounding box on image\n self.x, self.y, self.w, self.h = cv2.boundingRect(max_contour)\n\n # if wider than long, ignore\n if float(self.h) / self.w < 1:\n self.x = self.y = self.w = self.h = 0\n return img\n\n cv2.rectangle(img, (self.x, self.y), (self.x + self.w, self.y + self.h), color=(0, 255, 0), thickness=2)\n\n # get center\n M = cv2.moments(max_contour)\n if M['m00'] > 0:\n cx = int(M['m10']/M['m00'])\n cy = int(M['m01']/M['m00'])\n # draw a circle on the center\n cv2.circle(img, (cx, cy), 20, (0,0,255), -1)\n # calc ang error\n self.ang_error = cx - 320\n\n return img", "def create_mask(frame):\n \n # detect ridges\n ridges = enhance_ridges(frame)\n\n # threshold ridge image\n thresh = filters.threshold_otsu(ridges)\n thresh_factor = 1.1\n prominent_ridges = ridges > thresh_factor*thresh\n prominent_ridges = morphology.remove_small_objects(prominent_ridges, min_size=128)\n\n # the mask contains the prominent ridges\n mask = morphology.convex_hull_image(prominent_ridges)\n mask = morphology.binary_erosion(mask, disk(10))\n return mask", "def region_of_interest(img, vertices, debug = False):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 
3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def clean_mask(mask, background=0):\n kernels = [\n np.array([[ 1, -1, -1], [-1, 1, -1], [-1, -1, -1]]), # top left standalone pixel\n np.array([[-1, -1, 1], [-1, 1, -1], [-1, -1, -1]]), # top right standalone pixel\n np.array([[-1, -1, -1], [-1, 1, -1], [ 1, -1, -1]]), # bottom left standalone pixel\n np.array([[-1, -1, -1], [-1, 1, -1], [-1, -1, 1]]) # bottom right standalone pixel\n ]\n\n proc_masks = [cv2.morphologyEx(mask, cv2.MORPH_HITMISS, kernel).astype(np.bool) for kernel in kernels]\n\n for proc_mask in proc_masks:\n mask[proc_mask] = background\n return mask", "def region_of_interest(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def img_process(fgMask):\n backSub = cv.createBackgroundSubtractorKNN()\n kernel1 = cv.getStructuringElement(shape=cv.MORPH_ELLIPSE, ksize=(2,2))\n kernel2 = cv.getStructuringElement(shape=cv.MORPH_ELLIPSE, ksize=(2,2))\n #kernel1 = np.ones((3,3),np.uint8)\n #kernel2 = np.ones((3,3), np.uint8)\n\n fgMask = cv.threshold(fgMask, 230, 255, cv.THRESH_BINARY)[1]\n fgMask = cv.morphologyEx(fgMask, cv.MORPH_OPEN, kernel1,iterations = 2)\n fgMask = cv.dilate(fgMask, kernel2, iterations = 2)\n fgMask = cv.morphologyEx(fgMask, cv.MORPH_CLOSE, kernel2, iterations = 2)\n return fgMask", "def preprocess_mask(y):\n y[y <= 255./2] = 0 # Needs to be in this order, otherwise 1 gets overwritten\n y[y > 255./2] = 1\n binary_mask = y.astype(np.uint8)\n\n return binary_mask", "def Predict_Image_Contours(img, mask_full, feature_dict, filename = None, path = ''):\n for ii in range(len(feature_dict)):\n Type = feature_dict[str(ii)] ## So first key is 1\n if Type=='modern_build':\n color_rgb = (255,0,0)\n elif Type=='trad_build':\n color_rgb = (0,0,255)\n mask = mask_full[:,:,ii]\n mask = 255*mask.round().astype('uint8')\n mask = np.stack((mask,mask, mask),-1)\n mask = cv2.cvtColor(mask, cv2.COLOR_RGB2GRAY);\n ret, thresh = cv2.threshold(mask, 127.5, 255, cv2.THRESH_BINARY)\n\n contours,hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n ##print('here')\n area_thresh =30 ## Depends on what the minimal building size desired is\n for cnt in contours:\n ## Contours, flag of whether curve is closed or not\n epsilon = 0.025*cv2.arcLength(cnt,True)\n ## Contours, epsilon for wiggliness, closed shape or not\n approx = cv2.approxPolyDP(cnt,epsilon,True)\n ## Extract Area Dest image, contours, contour index\n area = cv2.contourArea(approx)\n ## centroid computed from moments\n M = cv2.moments(cnt) \n if area > area_thresh:\n if Type=='modern_build':\n rect = cv2.minAreaRect(cnt)\n box = cv2.boxPoints(rect)\n box = np.int0(box)\n 
img = cv2.drawContours(image = img, contours = [box], \n contourIdx = 0, color = color_rgb, \n thickness = 2)\n elif Type=='trad_build':\n (x,y),radius = cv2.minEnclosingCircle(cnt)\n center = (int(x),int(y))\n radius = int(radius)\n img = cv2.circle(img,center,radius,color_rgb,2)\n elif Type=='Forest':\n img = cv2.drawContours(image = img, contours = [cnt], \n contourIdx = 0, color = color_rgb, \n thickness = 2)\n elif Type=='Bare':\n img = cv2.drawContours(image = img, contours = [cnt], \n contourIdx = 0, color = color_rgb, \n thickness = 2)\n if filename is not None:\n try: \n if path == '':\n path = 'Predictions'\n os.makedirs(path)\n except OSError as error: \n print('') \n fig, ax = plt.subplots(figsize=(18, 20))\n ax.imshow(img[:,:,0:3])\n plt.tight_layout()\n plt.savefig(path + '/' + filename, bbox_inches='tight') \n plt.close(fig)\n \n return img", "def red_contour(image, save_images=False, fname=None):\n original = image.copy()\n\n b, g, r = cv2.split(image)\n bw0 = (r[:,:]>150).astype(np.uint8)*255\n\n bw1 = cv2.divide(r, g[:, :] + 1)\n bw1 = (bw1[:, :] > 1.5).astype(np.uint8)*255\n bw1 = np.multiply(bw1, bw0).astype(np.uint8) * 255\n bw2 = cv2.divide(r, b[:,:]+1)\n bw2 = (bw2[:, :] > 1.5).astype(np.uint8)*255\n\n bw = np.multiply(bw1, bw2).astype(np.uint8) * 255\n kernel = np.ones((5, 5), np.uint8)\n bw = cv2.morphologyEx(bw, cv2.MORPH_OPEN, kernel)\n bw = cv2.dilate(bw, kernel, iterations=1)\n _, bw = cv2.threshold(bw,0,255,0)\n\n # Now get the actual contours. Note that contour detection requires a\n # single channel image. Also, we only want the max one as that should be\n # where the sewn patch is located.\n (cnts, _) = cv2.findContours(bw, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n if len(cnts) == 0:\n cv2.imwrite(\"problem_original.png\", original)\n cv2.imwrite(\"problem_bw.png\", bw)\n print(\"Problem, len(cnts) == 0. See saved images...\")\n sys.exit()\n cnt_largest = max(cnts, key = lambda cnt: cv2.contourArea(cnt))\n\n # Find the centroid in _pixel_space_. 
Draw it.\n try:\n M = cv2.moments(cnt_largest)\n cX = int(M[\"m10\"] / M[\"m00\"])\n cY = int(M[\"m01\"] / M[\"m00\"])\n if save_images:\n peri = cv2.arcLength(cnt_largest, True)\n approx = cv2.approxPolyDP(cnt_largest, 0.02*peri, True)\n cv2.circle(image_bgr, (cX,cY), 50, (0,0,255))\n cv2.drawContours(image_bgr, [approx], -1, (0,255,0), 2)\n cv2.putText(img=image_bgr, \n text=\"{},{}\".format(cX,cY), \n org=(cX+10,cY+10), \n fontFace=cv2.FONT_HERSHEY_SIMPLEX,\n fontScale=1, \n color=(255,255,255), \n thickness=2)\n cv2.imwrite(fname.replace('.png','_cnt.png'), image_bgr)\n return (cX,cY)\n except:\n print(\"PROBLEM CANNOT DETECT CONTOUR ...\")", "def image_mask(image, patch_R, patch_C, seg_model):\n\n im = Image.open(image)\n im_name = os.path.basename(image).split('.')[0]\n im_width, im_height = im.width, im.height\n\n N = patch_R // patch_C\n\n W_ps_NI = im_width // patch_C # 31782 // 256 = 124\n # W_ps_NR = slide_width % patch_C # 31782 % 256 = 38\n H_ps_NI = im_height // patch_R # 24529 // 1024 = 23\n # H_ps_NR = slide_height % patch_R # 24529 % 1024 = 977\n\n cell_ratio = 0.85 # the threshold that decide the patch is background or not\n\n output_dir = os.path.join(current_path, \"..\", \"output\", \"output_mask\")\n if not os.path.isdir(output_dir): os.makedirs(output_dir)\n\n np_im = np.array(im)[:, :, 0:3] # exclude alpha\n for w in range(W_ps_NI):\n for h in range(H_ps_NI):\n subHIC = np_im[h * patch_R: (h+1) * patch_R, w * patch_C:(w+1) * patch_C, :]\n\n # rgb three channels value that >200 and <40 are ignored segment\n rgb_s = (abs(subHIC[:, :, 0] - 120) >= 80) & (abs(subHIC[:, :, 1] - 120) >= 80) & (\n abs(subHIC[:, :, 2] - 120) >= 80) # >200 <40\n\n if np.sum(rgb_s) <= (patch_R * patch_C) * cell_ratio:\n # segment\n subHIC = np.where(rgb_similarity(subHIC, 15, 195), 250, subHIC)\n # adjust equalization histogram and adjust brightness\n for k in range(subHIC.shape[2]):\n clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(N * 4, 4))\n subHIC[:, :, k] = clahe.apply(subHIC[:, :, k])\n subHIC = exposure.adjust_gamma(subHIC, gamma=1.5)\n subHIC = subHIC.reshape(N, patch_C, patch_C, 3)\n\n subHIC = subHIC.reshape(N, patch_C, patch_C, 3)\n allmask_prob_list = maskrcnn_detection(seg_model, subHIC)\n\n for i in range(len(allmask_prob_list)):\n for layer in range(allmask_prob_list[i].shape[2]):\n image, cnts, hierarchy = cv2.findContours(allmask_prob_list[i][:, :, layer],\n cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n np_im[h * patch_R + i * patch_C: h * patch_R + (i + 1) * patch_C, w * patch_C:(w + 1) * patch_C,\n :] = cv2.drawContours(np_im[h * patch_R + i*patch_C: h*patch_R+(i+1)*patch_C, w * patch_C:(w + 1) * patch_C, :],\n cnts, -1, (0, 255, 0), 1)\n\n # np_im[h * patch_R + i*patch_C: h*patch_R+(i+1)*patch_C, w * patch_C:(w + 1) * patch_C, :] = subHIC[i]\n\n # plt.savefig(os.path.join(output_dir, f\"{im_name}w{w}h{h}N{i}.png\"))\n\n io.imsave(os.path.join(output_dir, f\"{im_name}.png\"), np_im)", "def vis_mask(img, mask,width,height, col, alpha=0.4, show_border=True, border_thick= -1):\n\n img = img.astype(np.float32)\n idx = np.nonzero(mask)\n #np.PredictionBoxes(col)\n img[idx[0], idx[1], :] *= 1.0 - alpha\n img[idx[0], idx[1], :] += alpha * (400/255.0)\n\n if show_border:\n _, contours, _ = cv2.findContours(\n mask.copy(), cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)\n cv2.drawContours(img, contours, -1,col, border_thick, cv2.LINE_AA)\n #cv2.drawContours(c, contours, -1, 1, border_thick, cv2.LINE_AA)\n\n return img.astype(np.uint8)", "def boundary_contour(self, time):\n ti = 
np.where(time == self.times)[0][0]\n image_mask = binary_erosion(binary_fill_holes(binary_dilation(self.masks[ti])))\n padded_mask = np.pad(image_mask, 1, 'constant', constant_values=0)\n c_out = find_contours(padded_mask, level=0.5, fully_connected=\"high\")\n x_cont = self.x[ti][np.floor(c_out[0][:, 0]).astype(int), np.floor(c_out[0][:, 1]).astype(int)]\n y_cont = self.y[ti][np.floor(c_out[0][:, 0]).astype(int), np.floor(c_out[0][:, 1]).astype(int)]\n ordered_coords = np.vstack([x_cont, y_cont])\n return ordered_coords", "def do_full(self, image,hsv,upper,lower,debug=False):\n single_color_img = self.extract_single_color_range(image,hsv,lower,upper)\n if debug:\n # cv2.imshow('single_color_img',single_color_img)\n cv2.imwrite('debug_pics/single_color_img.jpg',single_color_img)\n single_channel = self.threshold_image(single_color_img,debug)\n if debug:\n # cv2.imshow('single_channel',single_channel)\n cv2.imwrite('debug_pics/single_channel.jpg',single_channel)\n cont,hierarchy = self.contours(single_channel,debug)\n\n if debug:\n for i,cnt in enumerate(cont):\n cv2.drawContours(single_channel,cont,i,(0,0,255),2)\n if debug: cv2.imwrite('debug_pics/contours.jpg',single_channel) #cv2.imshow('contours',single_channel)\n\n return self.get_bricks(cont)", "def getHitmask(image):\n mask = []\n for x in range(image.get_width()):\n mask.append([])\n for y in range(image.get_height()):\n mask[x].append(bool(image.get_at((x,y))[3]))\n return mask", "def getHitmask(image):\n mask = []\n for x in range(image.get_width()):\n mask.append([])\n for y in range(image.get_height()):\n mask[x].append(bool(image.get_at((x,y))[3]))\n return mask", "def getHitmask(image):\n mask = []\n for x in range(image.get_width()):\n mask.append([])\n for y in range(image.get_height()):\n mask[x].append(bool(image.get_at((x,y))[3]))\n return mask", "def _draw_mask_on_image(self, mask):\n mask = self.STANDARD_COLORS_ARRAY[mask]\n cv2.addWeighted(mask,self.config.ALPHA,self.image,1.0,0,self.image)", "def create_binary(image):\n #Channel 1 of the output image highlights the area consisting of the nuclei\n channel1=image[:,:,0]\n \n # Channel 2 of the output image consists of the boundaries between adjoining nuclei\n channel2=image[:,:,1]\n _,channel1=cv2.threshold(channel1, 127,255,cv2.THRESH_BINARY) \n _,channel2=cv2.threshold(channel2, 127,255,cv2.THRESH_BINARY) \n \n #Subtracting channel 2 from channel 1 to get the desired output\n img1=channel1-channel2\n \n return img1", "def mask_rectangle_img(image, left_bottom, left_top, right_top, right_bottom,\n ignore_mask_color=255):\n mask_img = np.zeros_like(image)\n vertices = np.array([[left_bottom, left_top, right_top, right_bottom]],\n dtype=np.int32)\n cv2.fillPoly(mask_img, vertices, ignore_mask_color)\n return cv2.bitwise_and(image, mask_img)", "def apply_mask(image, mask):\n image = image.astype(np.uint8)\n image = np.array(image)\n \n for c in range(3):\n image[:, :, c] = np.where(mask == 1,\n cv2.blur(image[:, :, c],(40,40)),\n image[:, :, c])\n return image", "def load_mask(self, image_id):\n image_info = self.image_info[image_id]\n if image_info[\"source\"] != \"face\":\n return super(self.__class__, self).load_mask(image_id)\n info = self.image_info[image_id]\n mask = np.zeros([info['height'], info['width'], len(info['boundingbox'])], dtype=np.uint8)\n for i, p in enumerate(info['boundingbox'].values()):\n rr, cc = skimage.draw.polygon(p['y'], p['x'])\n mask[rr, cc, i] = 1\n return mask, np.ones([mask.shape[-1]], dtype=np.int32)", "def region_of_interest(img, 
vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def region_of_interest(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def region_of_interest(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def region_of_interest(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def region_of_interest(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def region_of_interest(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 
3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def region_of_interest(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def getHitmask(self,image):\n\t\tmask = []\n\t\tfor x in range(image.get_width()):\n\t\t\tmask.append([])\n\t\t\tfor y in range(image.get_height()):\n\t\t\t\tmask[x].append(bool(image.get_at((x,y))[3]))\n\t\treturn mask", "def detect_contours(self):\r\n (contours, _) = cv2.findContours(self.image.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\r\n return [DbugContour(cv_contour=contour) for contour in contours]", "def img_roi(img, vertices):\n #defining a blank mask to start with\n mask = np.zeros_like(img) \n \n #defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n #filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n #returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def binarize(img):\n image = cv.cvtColor(img, cv.COLOR_BGR2GRAY)\n image = cv.GaussianBlur(image, (3, 3), 0)\n ret, image = cv.threshold(image, 0, 255, cv.THRESH_BINARY_INV | cv.THRESH_OTSU)\n return image", "def drawContour(im,draw):\r\n img = im.filter(ImageFilter.BLUR)\r\n img = im.filter(ImageFilter.SMOOTH)\r\n img = cv.cvtColor(numpy.array(img), cv.COLOR_RGB2BGR)\r\n edges = cv.Canny(img,100,200)\r\n pos = numpy.nonzero(edges)\r\n pos2 = [(pos[0][i],pos[1][i]) for i in range(0,len(pos[0]))]\r\n pos3=[tuple(map(lambda x:int(round(x/32)),i)) for i in pos2]\r\n pos3 = [(i[1],i[0]) for i in pos3]\r\n for i in pos3:\r\n if pos3.count((i[0]+1,i[1]))>20 and i[0]<16 and i[1]<16:\r\n draw.line([(32*i[0],32*i[1]),(32*(i[0]+1),32*i[1])],fill=(0,0,0),width=5)\r\n if pos3.count((i[0],i[1]+1))>20 and i[0]<16 and i[1]<16:\r\n draw.line([(32*i[0],32*i[1]),(32*(i[0]),32*(i[1]+1))],fill=(0,0,0),width=5)", "def binary_mask_to_polygon(binary_mask, tolerance=0):\r\n\r\n polygons = []\r\n if isinstance(binary_mask, torch.Tensor):\r\n binary_mask = binary_mask.cpu().numpy()\r\n # pad mask to close contours of shapes which start and end at an edge\r\n padded_binary_mask = np.pad(binary_mask, pad_width=1, mode='constant', constant_values=0)\r\n contours = measure.find_contours(padded_binary_mask, 0.5)\r\n contours = np.subtract(contours, 1)\r\n for contour in contours:\r\n contour = close_contour(contour)\r\n contour = measure.approximate_polygon(contour, tolerance)\r\n if 
len(contour) < 3:\r\n continue\r\n contour = np.flip(contour, axis=1) # x, y\r\n polygon = np.maximum(contour, 0)\r\n #segmentation = contour.ravel().tolist()\r\n # after padding and subtracting 1 we may get -0.5 points in our segmentation\r\n #segmentation = [0 if i < 0 else i for i in segmentation]\r\n polygons.append(polygon)\r\n\r\n return polygons", "def test(shape=(1000,2000)):\n mask = Mask()\n mask.addCircle(400,300,250)\n mask.subtractCircle(400,300,150)\n mask.addRectangle(350,250,1500,700)\n plt.imshow( mask.getMask(shape) )\n return mask", "def cmask(self):\n mask = np.zeros(18)\n if 'full' in self.CONS: mask[:] = 1\n if 'f0' in self.CONS: mask[0] = 1\n if 'f1' in self.CONS: mask[1:4] = 1\n if 'f2' in self.CONS: mask[4:10] = 1\n if 'vx' in self.CONS: mask[10] = 1\n if 'vy' in self.CONS: mask[11] = 1\n if 'vz' in self.CONS: mask[12] = 1\n if 'TG' in self.CONS: mask[13:18] = 1\n return mask>0", "def region_of_interest(img, vertices):\n \n # defining a blank mask to start with\n mask = np.zeros_like(img) \n \n # defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n # filling pixels inside the polygon defined by \"vertices\" with the fill color \n cv2.fillPoly(mask, vertices, ignore_mask_color)\n \n # returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n \n return masked_image", "def show_holes_on_img(mask,img):\n labeled, num_objects = ndi.label(mask)\n slices = ndi.find_objects(labeled)\n radius=9\n out_image = img.copy()\n out_image = cv2.cvtColor(out_image, cv2.COLOR_GRAY2RGB)\n for dy,dx in slices:\n x_center = (dx.start + dx.stop - 1)/2\n y_center = (dy.start + dy.stop - 1)/2 \n center=(x_center,y_center)\n cv2.circle(out_image, center, radius,(111,17,108),thickness=2)\n\n plt.figure()\n plt.imshow(out_image)\n plt.autoscale(False)\n return out_image", "def mask_overlay(image, mask, color=(0, 1, 0)):\n mask = np.dstack((mask, mask, mask)) * np.array(color)\n weighted_sum = cv2.addWeighted(mask, 0.5, image, 0.5, 0.)\n img = image.copy()\n ind = mask[:, :, 1] > 0\n img[ind] = weighted_sum[ind] \n return img", "def apply_mask(image, mask, color, alpha=0.5):\n for c in range(3):\n image[:, :, c] = np.where(mask == 1,\n image[:, :, c] *\n (1 - alpha) + alpha * color[c] * 255,\n image[:, :, c])\n # cv2.imshow(\"TEST\",image.astype(np.uint8))\n # print(color)\n return image", "def _preprocessing(image) -> np.ndarray:\n # TODO: Turn mapping into generic function.\n processed_image = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n processed_image[~mask] = 255\n return processed_image", "def gen_background_mask( img ):\n\t\t\n\tif len( img.shape ) == 3: t = img[0]\n\telif len( img.shape ) == 2: t = img\n\n\tmask = img > filters.threshold_li(t)\n\n\treturn mask", "def binarize_image(img, verbose=False):\n h, w = img.shape[:2]\n\n # creat an empty image with the same size as the passed frame to the function\n binary_output = np.zeros(shape=(h, w), dtype=np.uint8)\n\n # using HSV, Find yellow lanes in the image (min [0, 70, 70] and max [50, 255, 255] were selected to detect yellow at all conditions in the image)\n HSV_yellow_lanes = hsv_select(img, thresh=(\n [0, 100, 100], [50, 255, 255]), channel='all', verbose=False)\n\n #HSV_yellow_lanes = thresh_frame_in_HSV(img, yellow_HSV_th_min, yellow_HSV_th_max, verbose=False)\n\n # add the 
yellow mask to the binary image\n binary_output = np.logical_or(binary_output, HSV_yellow_lanes)\n\n # using Histogram Equalization, Find white lanes in the image\n histo_white_lanes = histo_image(img, verbose=False)\n\n # add the white mask to the binary image\n binary_output = np.logical_or(binary_output, histo_white_lanes)\n\n # apply sobel mask to the image\n sobel_mask = abs_sobel_thresh(\n img, orient='xy', sobel_kernel=9, thresh=(50, 200), verbose=False)\n\n # apply a light morphology to \"fill the gaps\" in the binary image\n kernel = np.ones((6, 6), np.uint8)\n closing = cv2.morphologyEx(sobel_mask.astype(\n np.uint8), cv2.MORPH_CLOSE, kernel)\n\n kernel = np.ones((2, 2), np.uint8)\n opening = cv2.morphologyEx(closing.astype(\n np.uint8), cv2.MORPH_OPEN, kernel)\n\n # add the sobel mask to the binary image\n binary_output = np.logical_or(opening, binary_output)\n\n # using HLS, Find lanes in the image\n hls_s_binary = hls_select(img, thresh=(200, 255), channel='S')\n\n # add the HLS mask to the binary image\n binary_output = np.logical_or(binary_output, hls_s_binary)\n\n # apply a light morphology to \"fill the gaps\" in the binary image\n kernel = np.ones((5, 5), np.uint8)\n binary_output = cv2.morphologyEx(binary_output.astype(\n np.uint8), cv2.MORPH_CLOSE, kernel)\n\n \n if verbose:\n f, ax = plt.subplots(2, 3)\n f.set_facecolor('white')\n\n ax[0, 0].imshow(cv2.cvtColor(img, code=cv2.COLOR_BGR2RGB), cmap='gray')\n ax[0, 0].set_title('Original')\n ax[0, 0].set_axis_off()\n \n ax[0, 1].imshow(HSV_yellow_lanes, cmap='gray')\n ax[0, 1].set_title('Yellow mask')\n ax[0, 1].set_axis_off()\n\n ax[0, 2].imshow(histo_white_lanes, cmap='gray')\n ax[0, 2].set_title('white mask')\n ax[0, 2].set_axis_off()\n\n ax[1, 2].imshow(sobel_mask, cmap='gray')\n ax[1, 2].set_title('Sobel mask')\n ax[1, 2].set_axis_off()\n\n ax[1, 0].imshow(binary_output, cmap='gray')\n ax[1, 0].set_title('OUTPUT')\n ax[1, 0].set_axis_off()\n\n ax[1, 1].imshow(closing, cmap='gray')\n ax[1, 1].set_title('closing')\n ax[1, 1].set_axis_off()\n\n # ax[1, 2].imshow(opening, cmap='gray')\n # ax[1, 2].set_title('opening')\n # ax[1, 2].set_axis_off()\n plt.show()\n return binary_output, closing, opening", "def generate_mask(self, thresh=50, b_ground=None):\n img = self.load_image()\n thresh = np.zeros(img.shape, \"uint8\")\n if b_ground is not None:\n img = img - b_ground\n thresh[img > 25] = 255\n mask = ndimage.morphology.binary_dilation(thresh).astype(\"uint8\")\n self.mask = 255*mask", "def create_mask(masking_positions, img, cells):\n left, right, top, bottom = masking_positions\n left += 1\n right += 1\n top += 1\n bottom += 1\n mask = np.ones((img.shape[0], img.shape[1]))*255\n\n # Compute corresponding positions and put zeros in the background part\n left = (img.shape[1]//cells[0])*left\n mask[:, :left] = 0\n right = img.shape[1]-(img.shape[1]//cells[0])*right\n mask[:, right:] = 0\n top = (img.shape[0]//cells[1])*top\n mask[:top, :] = 0\n bottom = img.shape[0]-(img.shape[0]//cells[0])*bottom\n mask[bottom:, :] = 0\n\n masks = mask.astype(np.uint8)\n return mask", "def region_of_interest(img, vertices):\n # defining a blank mask to start with\n mask = np.zeros_like(img)\n \n # defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 
3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n \n # filling pixels inside the polygon defined by \"vertices\" with the fill color\n cv2.fillPoly(mask, vertices, ignore_mask_color)\n # returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def get_masked_image(normal_image, segmented_image):\n color = [128, 128, 128]\n # create boundaries, same color but needs two arguments later on\n lower = np.array(color, dtype=\"uint8\")\n upper = np.array(color, dtype=\"uint8\")\n\n # find the colors within the boundaries and apply the mask\n mask = cv2.inRange(segmented_image, lower, upper)\n output = cv2.bitwise_and(normal_image, normal_image, mask = mask)\n\n folder = \"static/images/cropped\"\n os.system(\"rm %s/*.png\" % (folder))\n\n name = next(tempfile._get_candidate_names())\n path = \"%s/%s.png\" % (folder, name)\n cv2.imwrite(path, output)\n return path", "def binary_mask_to_polygon(binary_mask, tolerance=0):\n polygons = []\n # pad mask to close contours of shapes which start and end at an edge\n padded_binary_mask = np.pad(binary_mask, pad_width=1, mode='constant', constant_values=0)\n contours = measure.find_contours(padded_binary_mask, 0.5)\n contours = np.subtract(contours, 1)\n for contour in contours:\n contour = close_contour(contour)\n contour = measure.approximate_polygon(contour, tolerance)\n if len(contour) < 3:\n continue\n contour = np.flip(contour, axis=1)\n segmentation = contour\n # after padding and subtracting 1 we may get -0.5 points in our segmentation\n segmentation = [np.clip(i,0.0,i).tolist() for i in segmentation]\n polygons.append(segmentation)\n\n return polygons", "def __generate_mask(self):\n mask = np.concatenate([np.ones(len(self.fixed[0])),\n np.zeros(self.num_points),\n np.ones(len(self.fixed[1]))])\n return mask", "def region_of_interest(img, vertices):\n\n # defining a blank mask to start with\n mask = np.zeros_like(img)\n\n # defining a 3 channel or 1 channel color to fill the mask with depending on the input image\n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n\n # filling pixels inside the polygon defined by \"vertices\" with the fill color\n cv2.fillPoly(mask, vertices, ignore_mask_color)\n\n # returning the image only where mask pixels are nonzero\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def binary_contour_image_filter(*args, **kwargs):\n import itk\n instance = itk.BinaryContourImageFilter.New(*args, **kwargs)\n return instance.__internal_call__()", "def drawContours(img, cnt, color=(0, 255, 0), thickness=2):\n\tcv2.drawContours(img, cnt, -1, color, thickness)", "def mask(self):", "def findContours(img, thresh=127, val=255):\n\tgray = grayscale(img)\n\tret, binary = cv2.threshold(gray, thresh, val, cv2.THRESH_BINARY_INV)\n\tbina, contours, hierarchy = cv2.findContours(binary, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n\terrorCheckContour(contours)\n\treturn sort(contours)" ]
[ "0.7214323", "0.70771843", "0.694643", "0.69156", "0.6753374", "0.6689051", "0.66160184", "0.66025215", "0.65440816", "0.6517318", "0.65129876", "0.64886975", "0.6453866", "0.64246404", "0.64205784", "0.6411343", "0.63934743", "0.6383716", "0.6381961", "0.6374727", "0.63475114", "0.6322776", "0.6322223", "0.6308395", "0.63027936", "0.62911636", "0.6288618", "0.6286578", "0.6276172", "0.6275158", "0.6243866", "0.6234386", "0.6187722", "0.61822885", "0.6181642", "0.6181642", "0.6174077", "0.61559904", "0.6154584", "0.6149103", "0.6139498", "0.61356425", "0.6131511", "0.6115211", "0.6099027", "0.60795575", "0.6073357", "0.6058148", "0.60516083", "0.6043126", "0.6022535", "0.602047", "0.6019144", "0.60157484", "0.60040426", "0.6002191", "0.6001188", "0.59984076", "0.5989775", "0.5974962", "0.5974962", "0.5974962", "0.5973781", "0.5971172", "0.5967424", "0.5962666", "0.59618527", "0.59550864", "0.59550864", "0.59550864", "0.59550864", "0.59550864", "0.59550864", "0.59550864", "0.59549135", "0.5947326", "0.59452677", "0.5931026", "0.59217614", "0.59215194", "0.59134734", "0.59118587", "0.5906787", "0.5906134", "0.5897264", "0.58963704", "0.58953065", "0.5886708", "0.5875134", "0.5875115", "0.5874162", "0.58732104", "0.5868482", "0.5867262", "0.58636624", "0.5863203", "0.5861795", "0.5858125", "0.58523893", "0.58495605" ]
0.7433139
0
Load saved output from QuPath img import & processing function; basically just a stupid wrapper for json.load for now
def load_co_registration_data_from_json(filename: str) -> Dict[str, CoRegistrationData]:\n    with open(filename, "r") as json_file:\n        data = json.load(json_file)\n    co_reg_data = {}\n    for index, entry in data.items():\n        co_reg_data[index] = CoRegistrationData(\n            name=str(entry['name']),\n            target_w=int(entry['target_w']),\n            target_h=int(entry['target_h']),\n            transform_matrix=np.array(entry['transform_matrix']),\n            moving_img_name=str(entry['moving_img_name'])\n        )\n    return co_reg_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def loadjson(path, objectsofinterest, img):\n with open(path) as data_file:\n data = json.load(data_file)\n # print (path)\n pointsBelief = []\n boxes = []\n points_keypoints_3d = []\n points_keypoints_2d = []\n pointsBoxes = []\n poses = []\n centroids = []\n\n translations = []\n rotations = []\n points = []\n\n for i_line in range(len(data['objects'])):\n info = data['objects'][i_line]\n if not objectsofinterest is None and \\\n not objectsofinterest in info['class'].lower():\n continue\n\n box = info['bounding_box']\n boxToAdd = []\n\n boxToAdd.append(float(box['top_left'][0]))\n boxToAdd.append(float(box['top_left'][1]))\n boxToAdd.append(float(box[\"bottom_right\"][0]))\n boxToAdd.append(float(box['bottom_right'][1]))\n boxes.append(boxToAdd)\n\n boxpoint = [(boxToAdd[0], boxToAdd[1]), (boxToAdd[0], boxToAdd[3]),\n (boxToAdd[2], boxToAdd[1]), (boxToAdd[2], boxToAdd[3])]\n\n pointsBoxes.append(boxpoint)\n\n # 3dbbox with belief maps\n points3d = []\n\n pointdata = info['projected_cuboid']\n for p in pointdata:\n points3d.append((p[0], p[1]))\n\n # Get the centroids\n pcenter = info['projected_cuboid_centroid']\n\n points3d.append((pcenter[0], pcenter[1]))\n pointsBelief.append(points3d)\n points.append(points3d + [(pcenter[0], pcenter[1])])\n centroids.append((pcenter[0], pcenter[1]))\n\n # load translations\n location = info['location']\n translations.append([location[0], location[1], location[2]])\n\n # quaternion\n rot = info[\"quaternion_xyzw\"]\n rotations.append(rot)\n\n return {\n \"pointsBelief\": pointsBelief,\n \"rotations\": rotations,\n \"translations\": translations,\n \"centroids\": centroids,\n \"points\": points,\n \"keypoints_2d\": points_keypoints_2d,\n \"keypoints_3d\": points_keypoints_3d,\n }", "def loadjson(path, objectsofinterest, img):\n with open(path) as data_file: \n data = json.load(data_file)\n # print (path)\n pointsBelief = []\n boxes = []\n points_keypoints_3d = []\n points_keypoints_2d = []\n pointsBoxes = []\n poses = []\n centroids = []\n\n translations = []\n rotations = []\n points = []\n\n for i_line in range(len(data['objects'])):\n info = data['objects'][i_line]\n if not objectsofinterest is None and \\\n not objectsofinterest in info['class'].lower():\n continue \n \n box = info['bounding_box']\n boxToAdd = []\n\n boxToAdd.append(float(box['top_left'][0]))\n boxToAdd.append(float(box['top_left'][1]))\n boxToAdd.append(float(box[\"bottom_right\"][0]))\n boxToAdd.append(float(box['bottom_right'][1]))\n boxes.append(boxToAdd)\n\n boxpoint = [(boxToAdd[0],boxToAdd[1]),(boxToAdd[0],boxToAdd[3]),\n (boxToAdd[2],boxToAdd[1]),(boxToAdd[2],boxToAdd[3])]\n\n pointsBoxes.append(boxpoint)\n \n # 3dbbox with belief maps\n points3d = []\n \n pointdata = info['projected_cuboid']\n for p in pointdata:\n points3d.append((p[0],p[1]))\n\n # Get the centroids\n pcenter = info['projected_cuboid_centroid']\n\n points3d.append ((pcenter[0],pcenter[1]))\n pointsBelief.append(points3d)\n points.append (points3d + [(pcenter[0],pcenter[1])])\n centroids.append((pcenter[0],pcenter[1]))\n\n # load translations\n location = info['location']\n translations.append([location[0],location[1],location[2]])\n\n # quaternion\n rot = info[\"quaternion_xyzw\"]\n rotations.append(rot)\n\n return {\n \"pointsBelief\":pointsBelief, \n \"rotations\":rotations,\n \"translations\":translations,\n \"centroids\":centroids,\n \"points\":points,\n \"keypoints_2d\":points_keypoints_2d,\n \"keypoints_3d\":points_keypoints_3d,\n }", "def _json_import(self, imppath):\n # TODO: Settle on JSON 
format for colortable\n pass", "def loadJson (self, path):\n\n # get all lines in json, concatenate then into a big string then parse it\n with open(path, \"r\") as file_content:\n all_lines = file_content.readlines()\n all_content_str = \"\".join(all_lines)\n json_dict = json.loads(all_content_str)\n self.tile_reprs = list(json_dict['tiles']['structural-tiles'].keys())\n\n # remove this empty char\n self.tile_reprs.remove(\"-\")", "def _preprocess(self):\n print(\"Note: if root path is changed, the previously generated json files need to be re-generated (delete them first)\")\n if osp.exists(self.imgs_labeled_dir) and \\\n osp.exists(self.imgs_detected_dir) and \\\n osp.exists(self.split_classic_det_json_path) and \\\n osp.exists(self.split_classic_lab_json_path) and \\\n osp.exists(self.split_new_det_json_path) and \\\n osp.exists(self.split_new_lab_json_path):\n return\n\n mkdir_if_missing(self.imgs_detected_dir)\n mkdir_if_missing(self.imgs_labeled_dir)\n\n print(\"Extract image data from {} and save as png\".format(self.raw_mat_path))\n mat = h5py.File(self.raw_mat_path, 'r')\n\n def _deref(ref):\n return mat[ref][:].T\n\n def _process_images(img_refs, campid, pid, save_dir):\n img_paths = [] # Note: some persons only have images for one view\n for imgid, img_ref in enumerate(img_refs):\n img = _deref(img_ref)\n # skip empty cell\n if img.size == 0 or img.ndim < 3: continue\n # images are saved with the following format, index-1 (ensure uniqueness)\n # campid: index of camera pair (1-5)\n # pid: index of person in 'campid'-th camera pair\n # viewid: index of view, {1, 2}\n # imgid: index of image, (1-10)\n viewid = 1 if imgid < 5 else 2\n img_name = '{:01d}_{:03d}_{:01d}_{:02d}.png'.format(campid+1, pid+1, viewid, imgid+1)\n img_path = osp.join(save_dir, img_name)\n imageio.imwrite(img_path, img)\n img_paths.append(img_path)\n return img_paths\n\n def _extract_img(name):\n print(\"Processing {} images (extract and save) ...\".format(name))\n meta_data = []\n imgs_dir = self.imgs_detected_dir if name == 'detected' else self.imgs_labeled_dir\n for campid, camp_ref in enumerate(mat[name][0]):\n camp = _deref(camp_ref)\n num_pids = camp.shape[0]\n for pid in range(num_pids):\n img_paths = _process_images(camp[pid,:], campid, pid, imgs_dir)\n assert len(img_paths) > 0, \"campid{}-pid{} has no images\".format(campid, pid)\n meta_data.append((campid+1, pid+1, img_paths))\n print(\"done camera pair {} with {} identities\".format(campid+1, num_pids))\n return meta_data\n\n meta_detected = _extract_img('detected')\n meta_labeled = _extract_img('labeled')\n\n def _extract_classic_split(meta_data, test_split):\n train, test = [], []\n num_train_pids, num_test_pids = 0, 0\n num_train_imgs, num_test_imgs = 0, 0\n for i, (campid, pid, img_paths) in enumerate(meta_data):\n \n if [campid, pid] in test_split:\n for img_path in img_paths:\n camid = int(osp.basename(img_path).split('_')[2])\n test.append((img_path, num_test_pids, camid))\n num_test_pids += 1\n num_test_imgs += len(img_paths)\n else:\n for img_path in img_paths:\n camid = int(osp.basename(img_path).split('_')[2])\n train.append((img_path, num_train_pids, camid))\n num_train_pids += 1\n num_train_imgs += len(img_paths)\n return train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs\n\n print(\"Creating classic splits (# = 20) ...\")\n splits_classic_det, splits_classic_lab = [], []\n for split_ref in mat['testsets'][0]:\n test_split = _deref(split_ref).tolist()\n\n # create split for detected images\n train, 
num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs = \\\n _extract_classic_split(meta_detected, test_split)\n splits_classic_det.append({\n 'train': train, 'query': test, 'gallery': test,\n 'num_train_pids': num_train_pids, 'num_train_imgs': num_train_imgs,\n 'num_query_pids': num_test_pids, 'num_query_imgs': num_test_imgs,\n 'num_gallery_pids': num_test_pids, 'num_gallery_imgs': num_test_imgs,\n })\n\n # create split for labeled images\n train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs = \\\n _extract_classic_split(meta_labeled, test_split)\n splits_classic_lab.append({\n 'train': train, 'query': test, 'gallery': test,\n 'num_train_pids': num_train_pids, 'num_train_imgs': num_train_imgs,\n 'num_query_pids': num_test_pids, 'num_query_imgs': num_test_imgs,\n 'num_gallery_pids': num_test_pids, 'num_gallery_imgs': num_test_imgs,\n })\n \n write_json(splits_classic_det, self.split_classic_det_json_path)\n write_json(splits_classic_lab, self.split_classic_lab_json_path)\n\n def _extract_set(filelist, pids, pid2label, idxs, img_dir, relabel):\n tmp_set = []\n unique_pids = set()\n for idx in idxs:\n img_name = filelist[idx][0]\n camid = int(img_name.split('_')[2])\n pid = pids[idx]\n if relabel: pid = pid2label[pid]\n img_path = osp.join(img_dir, img_name)\n tmp_set.append((img_path, int(pid), camid))\n unique_pids.add(pid)\n return tmp_set, len(unique_pids), len(idxs)\n\n def _extract_new_split(split_dict, img_dir):\n train_idxs = split_dict['train_idx'].flatten() - 1 # index-0\n pids = split_dict['labels'].flatten()\n train_pids = set(pids[train_idxs])\n pid2label = {pid: label for label, pid in enumerate(train_pids)}\n query_idxs = split_dict['query_idx'].flatten() - 1\n gallery_idxs = split_dict['gallery_idx'].flatten() - 1\n filelist = split_dict['filelist'].flatten()\n train_info = _extract_set(filelist, pids, pid2label, train_idxs, img_dir, relabel=True)\n query_info = _extract_set(filelist, pids, pid2label, query_idxs, img_dir, relabel=False)\n gallery_info = _extract_set(filelist, pids, pid2label, gallery_idxs, img_dir, relabel=False)\n return train_info, query_info, gallery_info\n\n print(\"Creating new splits for detected images (767/700) ...\")\n train_info, query_info, gallery_info = _extract_new_split(\n loadmat(self.split_new_det_mat_path),\n self.imgs_detected_dir,\n )\n splits = [{\n 'train': train_info[0], 'query': query_info[0], 'gallery': gallery_info[0],\n 'num_train_pids': train_info[1], 'num_train_imgs': train_info[2],\n 'num_query_pids': query_info[1], 'num_query_imgs': query_info[2],\n 'num_gallery_pids': gallery_info[1], 'num_gallery_imgs': gallery_info[2],\n }]\n write_json(splits, self.split_new_det_json_path)\n\n print(\"Creating new splits for labeled images (767/700) ...\")\n train_info, query_info, gallery_info = _extract_new_split(\n loadmat(self.split_new_lab_mat_path),\n self.imgs_labeled_dir,\n )\n splits = [{\n 'train': train_info[0], 'query': query_info[0], 'gallery': gallery_info[0],\n 'num_train_pids': train_info[1], 'num_train_imgs': train_info[2],\n 'num_query_pids': query_info[1], 'num_query_imgs': query_info[2],\n 'num_gallery_pids': gallery_info[1], 'num_gallery_imgs': gallery_info[2],\n }]\n write_json(splits, self.split_new_lab_json_path)", "def load(self, input):", "def load_data(self) -> None:", "def load(self):", "def load_image(self, **kwargs):\n ...", "def __make_processing(self, img_name, abspath_dir_img, id_foot):\n data = {}\n data['data'] = ImageInfo.get_date(abspath_dir_img)\n data['total_part'] = 
TOTAL_PART\n data['nuvens'] = ImageInfo.get_cloud(abspath_dir_img)\n self.__make_tms(abspath_dir_img)\n data['geom'] = self.__make_footprint(abspath_dir_img, shp_out=id_foot)\n abspath_rgb, img_name_rgb = ImageInfo.get_image_rgb(\n abspath_dir_img, img_name\n )\n data['tms'] = ImageInfo.get_xml_tms(img_name_rgb)\n data['image'] = img_name_rgb\n data['quicklook'] = self.__make_png(abspath_rgb)\n data['path'] = ImageInfo.get_path(img_name)\n return data", "def load_image(self, image_id):\n info = self.image_info[image_id]\n label_path = info['path']\n\n # 读取json文件\n with open(os.path.join(self.DATA_ROOT_DIR, label_path), encoding='utf-8') as json_file:\n labelmeJson = json.load(json_file)\n # height = labelmeJson['imageHeight']\n # width = labelmeJson['imageWidth']\n # shape_list = labelmeJson['shapes']\n image = self.img_b64_to_arr(labelmeJson['imageData'])\n # bg_color = np.array(info['bg_color']).reshape([1, 1, 3])\n # image = np.ones([labelmeJson['height'], labelmeJson['width'], 3], dtype=np.uint8)\n # image = image * bg_color.astype(np.uint8)\n #\n # for shape, color, dims in info['shapes']:\n # image = self.draw_shape(image, shape, dims, color)\n\n return image", "def load_data(self):", "def repair_json(in_file, out_file, path_to_img):\n with open(in_file, 'rb') as f:\n data = f.readlines()\n\n # delete all \\n characters\n data = map(lambda x: x.rstrip(), data)\n\n # make one JSON object per row\n json_rows = []\n start = -1\n while True:\n try:\n start = data.index('{', start + 1)\n end = data.index('}', start)\n row = ''.join(data[start:end+1])\n row = re.sub(\"\\\"Bmp\\\"\", \"\\\"Image\\\"\", row)\n row = re.sub(\".bmp\", \".jpg\", row)\n json_rows.append(row)\n start = end\n except ValueError:\n break\n\n # join all JSON objects into one comme delimited string enclosed in square brackets\n data_join_str = \"[\" + ','.join(json_rows) + \"]\"\n\n # create JSON object\n repaired_json = json.loads(data_join_str)\n for x in repaired_json:\n f = os.path.join(path_to_img, x['Image'])\n print x['Image']\n if not os.path.isfile(f):\n repaired_json.remove(x)\n # remove out_file if it exists\n try:\n os.remove(out_file)\n except OSError:\n pass\n except IOError:\n pass\n print 'dumping...'\n with open(out_file, 'w+') as f:\n json.dump(repaired_json, f)", "def _load(self):\n if self.file_path.exists():\n with open(self.file_path) as fid:\n self.data = json.load(fid)", "def load_raw_text():\n if not os.path.exists( os.path.join( DATA_HOME, RAW_TEXT_FILE ) ) or \\\n not os.path.exists( os.path.join( DATA_HOME, LABELS_FILE ) ):\n print( 'no prior files found. staring from scratch' )\n rev, rat = parse_json( os.path.join( DATA_HOME, JSON_FILE ) )\n y = np.array( rat )\n print( 'saving data to files' )\n pickle.dump( rev , open( os.path.join( DATA_HOME, RAW_TEXT_FILE ), 'wb' ) )\n pickle.dump( y , open( os.path.join( DATA_HOME, LABELS_FILE ), 'wb' ) )\n else:\n print( 'found raw text and labes. loading...' 
)\n rev = pickle.load( open( os.path.join( DATA_HOME, RAW_TEXT_FILE ), 'rb' ) )\n y = pickle.load( open( os.path.join( DATA_HOME, LABELS_FILE ), 'rb' ) )\n print( 'done' )\n \n return rev, y", "def load_file(self):\n self._check_setup()\n json_str = self.get_json_file()\n if json_str is None:\n return\n\n if not self._is_json_str():\n with open(json_str, 'r') as f:\n jf = json.load(f)\n else:\n jf = json.loads(json_str)\n\n\n self.jf = jf\n\n target = jf['target']\n if isinstance(target, str):\n target = eval(target)\n\n goal = jf['goal']\n if isinstance(goal, str):\n goal = eval(goal)\n\n self.gen_target_pos = np.array(target)\n self.gen_goal_pos = np.array(goal)\n\n if 'place_walls' in jf:\n self.place_walls = jf['place_walls']\n\n if self.get_is_rnd():\n self.rnd_map = jf['rnd']\n self.env_jf = jf['env']", "def lice_main(base_folder, s3_client):\n\n sql_credentials = json.load(open(os.environ[\"SQL_CREDENTIALS\"]))\n sql_engine = create_engine(\n \"postgresql://{}:{}@{}:{}/{}\".format(sql_credentials[\"user\"], sql_credentials[\"password\"],\n sql_credentials[\"host\"], sql_credentials[\"port\"],\n sql_credentials[\"database\"]))\n\n metadata = MetaData()\n # step 1 - download crops + json\n # get the two tables we care about\n fish_crops = Table('lati_fish_detections', metadata, autoload=True, autoload_with=sql_engine)\n lice_crops = Table('lati_fish_detections_lice_annotations_reconciled', metadata, autoload=True,\n autoload_with=sql_engine)\n\n # inner join on fish crop id\n # TODO @Thomas debug this\n query = select([fish_crops.c.image_key, lice_crops.c.lice_bbox_list]) \\\n .select_from(lice_crops.join(fish_crops, lice_crops.c.lati_fish_detections_id == fish_crops.c.id)) \\\n .where(and_(fish_crops.c.site_id == 23,\n lice_crops.c.lice_bbox_list != None,\n # func.json_array_length(lice_crops.c.lice_bbox_list) > 0,\n lice_crops.c.created_by == \"[email protected]\"))\n\n json_files = []\n counter = 0\n with sql_engine.connect() as conn:\n for row in conn.execute(query):\n\t if len(row) == 0:\n\t \tcontinue\n # [image_key, lice_json]\n results = {}\n key = row[0]\n _, farm, penid, date, image_name = key.split('/')\n results[\"key\"] = key\n results[\"farm\"] = farm\n results[\"penid\"] = penid\n results[\"date\"] = date\n results[\"image_name\"] = image_name\n results[\"detections\"] = row[1]\n results[\"processed\"] = False\n destination = os.path.join(base_folder, \"crops\", farm, date, penid)\n\n results[\"image_path\"] = os.path.join(destination, image_name)\n if not os.path.isdir(destination):\n os.makedirs(destination)\n with open(os.path.join(destination, image_name.replace(\"jpg\", \"json\")), \"w\") as f:\n json.dump(results, f)\n if not os.path.isfile(os.path.join(destination, image_name)):\n s3_client.download_file(\"aquabyte-crops\", key, os.path.join(destination, image_name))\n counter += 1\n json_files.append(os.path.join(destination, image_name.replace(\"jpg\", \"json\")))\n print(\"{} new files have downloaded\".format(counter))\n\n # step 2 - create training and validation sets\n for jf in json_files:\n with open(jf, \"r\") as f:\n annotations = json.load(f)\n if annotations[\"processed\"]:\n continue\n image = io.imread(annotations[\"image_path\"])\n farm = annotations[\"farm\"]\n date = annotations[\"date\"]\n penid = annotations[\"penid\"]\n image_name = annotations[\"image_name\"]\n for (i, annotation) in enumerate(annotations['detections']):\n category = annotation['category']\n position = annotation['position']\n x1, height, y1, width = position[\"left\"], 
position[\"height\"], position[\"top\"], position[\"width\"]\n destination = os.path.join(base_folder, \"lice_only\", farm, date, penid, category)\n if not os.path.isdir(destination):\n os.makedirs(destination)\n lice_name = image_name + \".lice_{}.jpg\".format(i)\n io.imsave(os.path.join(destination, lice_name), image[y1:y1+height, x1:x1+width, :])\n # tag as processed\n annotations[\"processed\"] = True\n with open(jf, \"w\") as f:\n json.dump(annotations, f)", "def load(image_path):\n out = None\n\n #####################################\n # START YOUR CODE HERE #\n #####################################\n # Use skimage io.imread\n out = io.imread(image_path)\n ######################################\n # END OF YOUR CODE #\n ######################################\n\n return out", "def LoadBatch(filename):", "def load_image_custom(self, image_id):\n \n info = self.image_info[image_id]\n filePath = info[\"path\"]\n \n filename = os.path.basename(filePath)\n filePath = os.path.dirname(os.path.dirname(filePath))\n \n image = []\n image.append(self.read_image(filePath + \"/artery/\" + filename)[:,:,0]) # artery phase\n image.append(self.read_image(filePath + \"/portal/\" + filename)[:,:,0]) # portal-venous phase\n image.append(self.read_image(filePath + \"/delay/\" + filename)[:,:,0]) # delay phase\n image = np.transpose(image,(1,2,0))\n \n return image, filename", "def process_image(self):\n pass", "def test_load_path(parser):\n doc = parser.load(pathlib.Path('jsonexamples') / 'small' / 'demo.json')\n doc.at_pointer('/Image/Width')", "def __call__(self, results):\r\n if isinstance(results['img'], str):\r\n results['filename'] = results['img']\r\n results['ori_filename'] = results['img']\r\n else:\r\n results['filename'] = None\r\n results['ori_filename'] = None\r\n img = mmcv.imread(results['img'])\r\n results['img'] = img\r\n results['img_fields'] = ['img']\r\n results['img_shape'] = img.shape\r\n results['ori_shape'] = img.shape\r\n return results", "def get_output_json(self, case_path):\r\n if not os.path.exists(case_path):\r\n logging.ERROR('the path of source files does not exist')\r\n else:\r\n self.case_path = os.path.abspath(case_path)\r\n self.case_json = os.path.join(self.case_path, 'output.json')\r\n self.case_image = os.path.join(self.case_path, 'images')\r\n self.num_name = os.path.abspath(self.case_path).split(sep='\\\\')[-2]\r\n self.chi_name = IdToChinese[self.num_name]\r\n\r\n with io.open(self.case_json, 'r', encoding='utf-8') as f:\r\n json_data = json.load(f)\r\n self.audioResult = json_data['data']['audioResult']\r\n self.docs = self.audioResult['docs']\r\n self.classify_four_w= self.audioResult['4W']\r\n self.approval_information = self.audioResult['approval_information']\r\n return True", "def loadimages(root):\n imgs = []\n\n def add_json_files(path,):\n for imgpath in glob.glob(path+\"/*.png\"):\n if exists(imgpath) and exists(imgpath.replace('png',\"json\")):\n imgs.append((imgpath,imgpath.replace(path,\"\").replace(\"/\",\"\"),\n imgpath.replace('png',\"json\")))\n for imgpath in glob.glob(path+\"/*.jpg\"):\n if exists(imgpath) and exists(imgpath.replace('jpg',\"json\")):\n imgs.append((imgpath,imgpath.replace(path,\"\").replace(\"/\",\"\"),\n imgpath.replace('jpg',\"json\")))\n\n def explore(path):\n if not os.path.isdir(path):\n return\n folders = [os.path.join(path, o) for o in os.listdir(path) \n if os.path.isdir(os.path.join(path,o))]\n if len(folders)>0:\n for path_entry in folders: \n explore(path_entry)\n else:\n add_json_files(path)\n\n explore(root)\n\n 
return imgs", "def process(self, image):", "def build(self):\n # open json, len 161,260\n at_json = open_json(self.json_names[0])\n link_json = open_json(self.json_names[1])\n # if need preprocessing, do it\n if self.args.img_preprocessing:\n print(\"resize imgs\")\n for i in tqdm(range(len(link_json))):\n image_url = \"image/\" + link_json[i][\"image_url_4x\"].split('/')[-1]\n img = Image.open(image_url)\n img = img.resize((224, 224))\n img.save(image_url)\n\n # create dataset\n itemlen = 0\n previd = 0\n for i in tqdm(range(len(link_json))):\n image_url = link_json[i][\"image_url_4x\"].split('/')[-1]\n uid = image_url.split('-')[0]\n if previd != uid:\n self.label.append(list(at_json[i].values())[2:])\n if i != 0:\n self.itemlen.append(itemlen)\n itemlen = 0\n self.input.append(f\"{self.frontpath}dataset/image/\" + image_url)\n previd = uid\n itemlen += 1\n self.itemlen.append(itemlen)\n self.separate()\n self.dataset = {\n 'train': self.train,\n 'validation': self.val,\n 'test': self.test\n }\n\n print('finished dataset')", "def getimgs():", "def process_data(output_folder):\n # select imgs\n img_folder = join(output_folder, 'img')\n select_img(output_folder, img_folder, 'HE-green')\n\n mask_folder = join(output_folder, 'mask')\n select_img(output_folder, mask_folder, '_EF5')", "def _file_loader(self) -> dict:\n cfg = None\n try:\n with open(self._path) as file:\n cfg = json.loads(file.read())\n except FileNotFoundError as e:\n print(e)\n exit(1)\n return cfg", "def _load_disk(self):", "def _load_disk(self):", "def transform(img_path):\n\n img = self.loader(img_path)\n return self.transform(img)", "def load_(self):\n path = os.path.join(os.path.dirname(self.arch_handler.dicomdir_path), self.SAVE_NAME)\n if not os.path.isfile(path):\n print(\"No history to load\")\n return\n with open(path, \"r\") as infile:\n data = json.load(infile)\n self.load(data['history'])\n self._edited = False", "def _update(self):\n print(\"Saving prediction json files...\")\n self._dump_json()\n print(\"Saving prediction json files done...\")\n print(\"Saving prediction images...\")\n self._dump_image()\n print(\"Saving prediction images done...\")", "def load_dataset(self, dataset_dir, json_path):\n # Add classes. We have only one class to add.\n self.add_class(\"glomerulus\", 1, \"glomerulus\")\n\n \n\n # Load annotations\n # VGG Image Annotator saves each image in the form:\n # { 'filename': '28503151_5b5b7ec140_b.jpg',\n # 'regions': {\n # '0': {\n # 'region_attributes': {},\n # 'shape_attributes': {\n # 'all_points_x': [...],\n # 'all_points_y': [...],\n # 'name': 'polygon'}},\n # ... 
more regions ...\n # },\n # 'size': 100202\n # }\n # We mostly care about the x and y coordinates of each region\n json_path=\"../../data/hubmap/train/aaa6a05cc.json\"", "def load(self):\n logger.debug(f\"Reading {self.path.name}\")\n self.label = int(Data.fromLabel(self.path.parent.name))\n self.image = skimg.data.imread(self.path)", "def preload(self):\n # load the objects\n for otype, fname in self.TYPE2NAME.items():\n if fname:\n path = os.path.join(self.anodir, fname + \".gz\")\n if os.path.isfile(path):\n with gzip.open(path, \"rt\") as handler:\n for line in handler:\n omap = json.loads(line)\n cls = self.TYPE2CLASS[otype]\n item = cls.from_map(omap, self)\n self.caches[otype][item.id] = item", "def load_from_geojson(self, filename_or_url):", "def loadImagesTag(self): \n dictionary = {}\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(GENDER_FRONT)\n dictionary[\"gender\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(SKIN_BACK)\n dictionary[\"skin\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(HEAD_BACK)\n dictionary[\"head\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(BODY_BACK)\n dictionary[\"body\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(MASK_BACK)\n dictionary[\"mask\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(HAIR_BACK)\n dictionary[\"hair\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n if self.avatarConfiguration[\"gender\"] == \"boy\":\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(SHIRT_BACK)\n dictionary[\"shirt\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(TROUSERS_BACK)\n dictionary[\"trousers\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(SKIRT_BACK)\n dictionary[\"skirt\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n else:\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(SHIRT_DISABLED)\n dictionary[\"shirt\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(TROUSERS_DISABLED)\n dictionary[\"trousers\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(SKIRT_BACK)\n dictionary[\"skirt\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(SHOES_BACK)\n dictionary[\"shoes\"] = guiobjects.OcempImageButtonTransparent(imgPath)\n return dictionary", "def load_file(path):\n with open(path, \"rb\") as f: # bsps are binary files\n byte_list = f.read() # stores all bytes in bytes1 variable (named like that to not interfere with builtin names\n header = load_header(byte_list)\n skin_names = [byte_list[header.ofs_skins + 64 * x:header.ofs_skins + 64 * x + 64].decode(\"ascii\", \"ignore\") for x in range(header.num_skins)]\n triangles = load_triangles(byte_list[header.ofs_tris:header.ofs_frames], header)\n frames = load_frames(byte_list[header.ofs_frames:header.ofs_glcmds], header)\n texture_coordinates = load_texture_coordinates(byte_list[header.ofs_st:header.ofs_tris], header)\n gl_commands = load_gl_commands(byte_list[header.ofs_glcmds:header.ofs_end])\n 
# print(header)\n # print(skin_names)\n # print(triangles)\n # print(frames)\n # print(texture_coordinates)\n for i in range(len(texture_coordinates)):\n texture_coordinates[i].s = texture_coordinates[i].s/header.skinwidth\n texture_coordinates[i].t = texture_coordinates[i].t / header.skinheight\n # print(texture_coordinates)\n # print(header.num_xyz)\n for i_frame in range(len(frames)):\n for i_vert in range((header.num_xyz)):\n frames[i_frame].verts[i_vert].v[0] = frames[i_frame].verts[i_vert].v[0]*frames[i_frame].scale.x+frames[i_frame].translate.x\n frames[i_frame].verts[i_vert].v[1] = frames[i_frame].verts[i_vert].v[1] * frames[i_frame].scale.y + frames[i_frame].translate.y\n frames[i_frame].verts[i_vert].v[2] = frames[i_frame].verts[i_vert].v[2] * frames[i_frame].scale.z + frames[i_frame].translate.z\n model = md2_object(header, skin_names, triangles, frames, texture_coordinates, gl_commands)\n return model", "def run(self, input_path, output_path):\n # read in data\n try:\n image = Image.open(input_path)\n except Exception:\n raise ValueError(\"invalid image file\")\n \n # data preprocessing\n img = self.preprocess(image)\n \n # perform inference\n output = self.model(img)\n \n # post process\n results = self.postprocess(output)\n \n # save output\n results = {'results': results}\n\n with open(output_path, 'w') as out:\n json.dump(results, out)", "def _load(f, as_gray=False):\n # importing io is quite slow since it scans all the backends\n # we lazy import it here\n from skimage.io import imread\n return imread(os.path.join(data_dir, f), plugin='pil', as_gray=as_gray)", "def _process_image_local(raw_image_path):\n return process_image(\n original_rgb_image=raw.open.as_rgb(raw_image_path),\n original_image_filepath=raw_image_path,\n raw_images_dir=raw_images_dir,\n ROI_definitions=ROI_definitions,\n flat_field_filepath_or_none=flat_field_filepath_or_none,\n save_ROIs=save_ROIs,\n save_dark_frame_corrected_image=save_dark_frame_corrected_images,\n save_flat_field_corrected_image=save_flat_field_corrected_images,\n )", "def load(self, filename):\n pass", "def get_pic() -> str:\n with open(os.path.dirname(os.path.abspath(__file__))+'\\\\data.json', 'r') as test:\n test = json.load(test)\n pic = test['button_pic']\n return pic", "def load(self):\n\n raise NotImplementedError", "def load(path):\n pass", "def import_scene(file_path):\n\n pass", "def read_layout(outFile=None, linked=False, append=False):\n from cgl.plugins.blender.lumbermill import scene_object, LumberObject, import_file\n from cgl.core.utils.read_write import load_json\n import bpy\n\n if outFile == None:\n outFileObject = scene_object().copy(ext='json', task='lay', user='publish').latest_version()\n outFileObject.set_attr(filename='%s_%s_%s.%s' % (outFileObject.seq,\n outFileObject.shot,\n outFileObject.task,\n 'json'\n ))\n outFile = outFileObject.path_root\n # outFile = scene_object().path_root.replace(scene_object().ext, 'json')\n\n\n\n data = load_json(outFile)\n\n for p in data:\n print(p)\n data_path = data[p]['source_path']\n blender_transform = data[p]['blender_transform']\n\n transform_data = []\n for value in blender_transform:\n transform_data.append(value)\n\n print(transform_data)\n\n pathToFile = os.path.join(scene_object().root, data_path)\n lumberObject = LumberObject(pathToFile)\n\n\n\n if lumberObject.filename in bpy.data.libraries:\n lib = bpy.data.libraries[lumberObject.filename]\n bpy.data.batch_remove(ids=([lib]))\n import_file(lumberObject.path_root, linked=linked, append=append)\n else:\n 
import_file(lumberObject.path_root, linked=linked, append=append)\n\n if p not in bpy.context.collection.objects:\n obj = bpy.data.objects.new(p, None)\n bpy.context.collection.objects.link(obj)\n obj.instance_type = 'COLLECTION'\n obj.instance_collection = bpy.data.collections[lumberObject.asset]\n obj.location = (transform_data[0], transform_data[1], transform_data[2])\n obj.rotation_euler = (transform_data[3], transform_data[4], transform_data[5])\n obj.scale = (transform_data[6], transform_data[7], transform_data[8])\n\n bpy.ops.file.make_paths_relative()", "def __load(self, node, path):\n\n self.firstgid = node['firstgid']\n self.margin = node['margin']\n self.spacing = node['spacing']\n\n # convierte la ruta de la imagen en una ruta relativa al proyecto\n directory = os.path.dirname(path)\n self.image_path = os.path.join(directory, *node['image'].split(r'\\/'))\n self.image_path = os.path.normpath(self.image_path)", "def load_and_fix(self):\n # Read in json\n self.read_json()\n\n if self.size_to_load:\n self.data = self.data[:self.size_to_load]\n\n # Add names from database given _bsn:\n self.extend_dataframe_with_personnames()\n\n # Clean rows in the data_frame where the names column is empty - > thus no response from the database\n self.clean_none_response()\n\n # Fix path from A09.pdf to A09.json\n self.fix_path()\n\n # Get the correct names from the database response\n self.parse_names_from_response()\n\n print(\" --- Final Shape Data ---\")\n print(self.data.shape)\n print(list(self.data))\n\n # Save pickled object in ./data map\n self.save_obj(self.data, self.file_name_to_save)", "def loadimages(root):\n imgs = []\n\n def add_json_files(path, ):\n for imgpath in glob.glob(path + \"/*.png\"):\n if exists(imgpath) and exists(imgpath.replace('png', \"json\")):\n imgs.append((imgpath, imgpath.replace(path, \"\").replace(\"/\", \"\"),\n imgpath.replace('png', \"json\")))\n for imgpath in glob.glob(path + \"/*.jpg\"):\n if exists(imgpath) and exists(imgpath.replace('jpg', \"json\")):\n imgs.append((imgpath, imgpath.replace(path, \"\").replace(\"/\", \"\"),\n imgpath.replace('jpg', \"json\")))\n\n def explore(path):\n if not os.path.isdir(path):\n return\n folders = [os.path.join(path, o) for o in os.listdir(path)\n if os.path.isdir(os.path.join(path, o))]\n if len(folders) > 0:\n for path_entry in folders:\n explore(path_entry)\n else:\n add_json_files(path)\n\n explore(root)\n\n return imgs", "def import_image(self, file: str) -> Any:\n pass", "def preprocessing_objects(img_data, hierarchy_mapping, object_file_name='objects.p'):\n\n object_path_token = \"{0}.{1}.{2}\".format(DATA, VISUAL_GENOME, get_name_from_file(object_file_name))\n\n # Check if pickles are already created\n objects_path = FilesManager().get_file_path(object_path_token)\n\n if os.path.isfile(objects_path):\n Logger().log('File is already exist {0}'.format(objects_path))\n objects = FilesManager().load_file(object_path_token)\n return objects\n\n # Bad urls which should be sorted out\n bad_urls = get_bad_urls()\n\n # Get the whole objects from entities\n objects_lst = []\n correct_labels = hierarchy_mapping.keys()\n idx = 0\n for img in img_data:\n\n # Get the url image\n url = img.image.url\n\n # Sorting bad urls\n if url in bad_urls:\n continue\n\n # Get the objects per image\n objects = img.objects\n for object in objects:\n\n # Get the lable of object\n label = object.names[0]\n\n # Check if it is a correct label\n if label not in correct_labels:\n continue\n\n new_object_mapping = ObjectMapping(object.id, 
object.x, object.y, object.width, object.height, object.names,\n object.synsets, url)\n # Append the new objectMapping to objects_lst\n objects_lst.append(new_object_mapping)\n\n idx += 1\n Logger().log(\"Finished img: {}\".format(idx))\n\n # Pickle objects_lst\n objects_array = np.array(objects_lst)\n # Save the objects files to the disk\n FilesManager().save_file(object_path_token, objects_array)\n return objects_array", "def vl2img(vl_json_in, fileformat):\n\n # TODO would prefer to do this properly with pipes\n # using | and shell=True is safe though given no arguments\n executables = {\"svg\": \"vg2svg\", \"png\": \"vg2png\", \"pdf\": \"vg2pdf\"}\n try:\n exe = executables[fileformat]\n except KeyError as e:\n print(e.output)\n try:\n return subprocess.check_output(\"vl2vg | %s\" % exe, shell=True, input=vl_json_in)\n except subprocess.CalledProcessError as e:\n print(e.output)", "def get_imgs_from_json(self):\n # instantiate COCO specifying the annotations json path\n # Specify a list of category names of interest\n catIds = self.coco.getCatIds(catNms=[self.categ])\n print(\"catIds: \", catIds)\n # Get the corresponding image ids and images using loadImgs\n imgIds = self.coco.getImgIds(catIds=catIds)\n images = self.coco.loadImgs(imgIds)\n print(f\"{len(images)} images in '{self.json_path}' with '{self.categ}' instances\")\n self.catIds = catIds # list\n return images", "def parse_and_map(self, local_inet_path):\n for file_name in tqdm(self.filenames):\n # TODO: Add some log while processing data\n # Reads file name from full file path\n sliced_list = file_name.split(sep='/t')[-1].split(sep='_')\n self.data_dict['path'].append(file_name)\n self.data_dict['dataset'].append(sliced_list[1])\n self.data_dict['device'].append(sliced_list[2])\n self.data_dict['wn_id'].append(sliced_list[3])\n self.data_dict['im_id'].append(sliced_list[4])\n self.data_dict['eeg_session'].append(sliced_list[5])\n self.data_dict['global_session'].append(sliced_list[6].split(sep='.')[0])\n # File name: /MindBigData_Imagenet_Insight_n00007846_6247_1_785\n # Imagenet file path: /n00007846/n00007846_6247.JPEG\n file_name = str(sliced_list[3] + '_' + sliced_list[4] + '.JPEG')\n inet_path = os.path.join(local_inet_path, sliced_list[3], file_name)\n # If copy is true, data related local ImageNet images will be copied to separate folder\n if self.copy:\n try:\n # New file paths\n new_dir_path = os.path.join(self.copy_path, sliced_list[3])\n new_inet_path = os.path.join(new_dir_path, file_name)\n # Creates recursive folders in disk\n os.makedirs(new_dir_path, exist_ok=True, mode=0o771)\n # Copies file to destination\n shutil.copy(inet_path, new_inet_path)\n # Appends new file path to list\n self.data_dict['inet_path'].append(new_inet_path)\n except Exception as e:\n # TODO: More useful exception\n print(e)\n else:\n # Append local ImageNet path to list\n self.data_dict['inet_path'].append(inet_path)", "def load(self):\n #print self.fileInfo.name\n progress = self.progress\n filePath = os.path.join(self.fileInfo.dir,self.fileInfo.name)\n self.fileSize = os.path.getsize(filePath)\n #--Localize\n cells = self.cells\n records = self.records\n canSave = self.canSave\n skipObjRecords = self.skipObjRecords\n contTypes = set(['CREC','CNTC','NPCC'])\n levTypes = set(('LEVC','LEVI'))\n debrisIds = self.debrisIds\n debrisTypes = set(debrisIds.keys())\n #--Header\n inPath = os.path.join(self.fileInfo.dir,self.fileInfo.name)\n ins = Tes3Reader(self.fileInfo.name,file(inPath,'rb'))\n (name,size,delFlag,recFlag) = 
ins.unpackRecHeader()\n self.tes3 = Tes3(name,size,delFlag,recFlag,ins,True)\n if not canSave: del self.tes3.others[:]\n #--Progress info\n progress = self.progress\n progress(0.0,'Loading '+self.fileInfo.name)\n #--Raw data read\n while not ins.atEnd():\n #--Get record info and handle it\n (name,size,delFlag,recFlag) = ins.unpackRecHeader()\n #print \"%s [%d]\" % (name,size)\n #--CELL?\n if name == 'CELL':\n record = Cell(name,size,delFlag,recFlag,ins,0,skipObjRecords)\n cells.append(record)\n if canSave: records.append(record)\n #--Contents\n elif canSave and name in contTypes:\n if name == 'CREC':\n record = Crec(name,size,delFlag,recFlag,ins,True)\n elif name == 'CNTC':\n record = Cntc(name,size,delFlag,recFlag,ins,True)\n else:\n record = Npcc(name,size,delFlag,recFlag,ins,True)\n self.conts.append(record)\n self.conts_id[record.getId()] = record\n records.append(record)\n #--File Map\n elif name == 'FMAP':\n record = Fmap(name,size,delFlag,recFlag,ins)\n self.fmap = record\n records.append(record)\n #--Landscapes\n elif name == 'LAND':\n record = Land(name,size,delFlag,recFlag,ins)\n self.lands[record.getId()] = record\n records.append(record)\n #--Scripts\n elif canSave and name == 'SCPT':\n record = Scpt(name,size,delFlag,recFlag,ins,True)\n records.append(record)\n if record.getRef():\n self.refs_scpt[record] = record.getRef()\n #--Save debris info?\n elif name in debrisTypes:\n record = Record(name,size,delFlag,recFlag,ins)\n id = record.getId()\n if id:\n debrisIds[name].append(id.lower())\n if canSave:\n records.append(record)\n #--Skip Non-cell?\n elif not canSave:\n ins.seek(size,1,name)\n #--Keep non-cell?\n else:\n records.append(Record(name,size,delFlag,recFlag,ins))\n #--Done Reading\n ins.close()\n #--Analyze Cells\n cntCells = 0\n progress.setMax(len(self.cells))\n for cell in self.cells:\n cell.load(None,1)\n self.cells_id[cell.getId()] = cell\n if not canSave:\n cell.data = None #--Free some memory\n #--Progress\n cntCells += 1\n progress(cntCells)\n #--Scripts\n if self.refs_scpt:\n self.updateScptRefs()", "def get_raw_data(input_path, save_gt=False):\n # define variable for returning\n all_txts = [] # a list, each element is a dictionary\n coords = [] # a list, storing a image's all text region's coordinates which is clockwise\n num_txt = 0\n visual = False\n print('Parsing txt files')\n # txt_directory = os.path.join(input_path, 'text')\n # all_txt_files = [os.path.join(txt_directory, s) for s in os.listdir(txt_directory)]\n txtfiles = input_path + '/*.txt'\n all_txt_files = glob.glob(txtfiles)\n box_num = 0\n for txt in all_txt_files:\n with open(txt, 'r') as f:\n num_txt += 1\n for line in f:\n box_num += 1\n line_split = line.strip().split(',')\n # clockwise\n (x1, y1, x2, y2) = line_split[0:4]\n (x3, y3, x4, y4) = line_split[4:8]\n coords.append((x1, y1, x2, y2, x3, y3, x4, y4))\n txtfilepath = txt\n # using regular expression, get image file path\n # pattern = re.compile('text')\n # img_file_path = pattern.sub('image', txt)\n pattern = re.compile('txt')\n img_file_path = pattern.sub('jpg', txtfilepath)\n txt_data = {'imagePath': img_file_path, 'boxCoord': coords, 'boxNum': box_num}\n box_num = 0\n coords = []\n # image file wheater corresponding to text file, and image file is not empty then add\n if os.path.isfile(img_file_path) and os.path.isfile(txtfilepath) \\\n and os.path.getsize(img_file_path):\n all_txts.append(txt_data)\n # -----------------------visualizing-----------------------------------------\n # draw text region on image and save image\n # 
print text region on image for comparing gt and predicted results\n if os.path.isfile(img_file_path) and os.path.isfile(txtfilepath) \\\n and os.path.getsize(img_file_path) and save_gt:\n save_groudtruth(cv2.imread(img_file_path), txt_data['boxCoord'], img_file_path)\n\n # draw text region on image and show image\n if os.path.isfile(img_file_path) and os.path.isfile(txtfilepath) \\\n and os.path.getsize(img_file_path) and visual:\n visualize(cv2.imread(img_file_path), txt_data['boxCoord'], img_file_path)\n # -----------------------visualizing-----------------------------------------\n return all_txts, num_txt", "def load(self, path):\n pass", "def load(self, path):\n pass", "def _get_data(path):\n archive = np.load(path)\n images = archive['faceData']\n return images", "def load(path: str) -> Any:\n config = load_configs(path)\n config.reduce(config.MUTATIONS)\n config.reduce('_reduce')\n for reduces in config.output.get('_reduce') or []:\n for item in reduces or [None]:\n config.reduce(item)\n\n output = config.output\n for post_process in output.get('_post_process') or []:\n file_info = find(post_process)\n file_info.search(file_info.module)(output)\n return output", "def _load_jsons(self):\n items = []\n labels = []\n\n with open(self._ann_file, 'r') as fid:\n database = json.load(fid)\n # iterate through the annotations\n bbox_scale_list = []\n det_bbox_set = {}\n if self._det_bbox_file is not None:\n bbox_list = json.load(open(os.path.join(\n self._root, 'annotations', self._det_bbox_file + f'_protocol_{self.protocol}.json'), 'r'))\n for item in bbox_list:\n image_id = item['image_id']\n det_bbox_set[image_id] = item['bbox']\n\n for ann_image, ann_annotations in zip(database['images'], database['annotations']):\n ann = dict()\n for k, v in ann_image.items():\n assert k not in ann.keys()\n ann[k] = v\n for k, v in ann_annotations.items():\n ann[k] = v\n skip = False\n for name in self.block_list:\n if name in ann['file_name']:\n skip = True\n if skip:\n continue\n\n image_id = ann['image_id']\n\n width, height = ann['width'], ann['height']\n if self._det_bbox_file is not None:\n xmin, ymin, xmax, ymax = bbox_clip_xyxy(\n bbox_xywh_to_xyxy(det_bbox_set[ann['file_name']]), width, height)\n else:\n xmin, ymin, xmax, ymax = bbox_clip_xyxy(\n bbox_xywh_to_xyxy(ann['bbox']), width, height)\n\n R, t = np.array(ann['cam_param']['R'], dtype=np.float32), np.array(\n ann['cam_param']['t'], dtype=np.float32)\n f, c = np.array(ann['cam_param']['f'], dtype=np.float32), np.array(\n ann['cam_param']['c'], dtype=np.float32)\n\n joint_world = np.array(ann['keypoints_world'])\n joint_world = self.add_thorax(joint_world)\n joint_cam = np.zeros((self.num_joints, 3))\n for j in range(self.num_joints):\n joint_cam[j] = world2cam(joint_world[j], R, t)\n\n joint_img = cam2pixel(joint_cam, f, c)\n joint_img[:, 2] = joint_img[:, 2] - joint_cam[self.root_idx, 2]\n joint_vis = np.ones((self.num_joints, 3))\n\n root_cam = joint_cam[self.root_idx]\n\n abs_path = os.path.join(self._root, 'images', ann['file_name'])\n\n tot_bone_len = 0\n for parent, child in self.skeleton:\n bl = np.sqrt(np.sum((joint_cam[parent] - joint_cam[child]) ** 2))\n tot_bone_len += bl\n\n items.append(abs_path)\n labels.append({\n 'bbox': (xmin, ymin, xmax, ymax),\n 'img_id': image_id,\n 'img_path': abs_path,\n 'width': width,\n 'height': height,\n 'joint_img': joint_img,\n 'joint_vis': joint_vis,\n 'joint_cam': joint_cam,\n 'root_cam': root_cam,\n 'tot_bone_len': tot_bone_len,\n 'f': f,\n 'c': c\n })\n bbox_scale_list.append(max(xmax - 
xmin, ymax - ymin))\n\n return items, labels", "def _load(self, pkgpart, part_dict):\n # call parent to do generic aspects of load\n super(Image, self)._load(pkgpart, part_dict)\n # set file extension\n self.__ext = posixpath.splitext(pkgpart.partname)[1]\n # return self-reference to allow generative calling\n return self", "def test_load_jpg():\n parameters = {'path': 'green-dot.jpg'}\n\n images.load(parameters)", "def read(self, path):\n\n assert path.endswith('.'+FILETYPE_SKETCH)\n fileName = path.split('/')[-1] # Use file name as document name and storage of images\n\n skf = SketchFile(path)\n\n # Construct the directory name to store images. Create the directory if it does not exist.\n # aPath/fileName.sketch --> aPath/fileName_images/\n # Answer the newly constructed image path.\n imagesPath = skf.imagesPath\n if not os.path.exists(imagesPath):\n os.makedirs(imagesPath)\n\n zf = zipfile.ZipFile(path, mode='r') # Open the file.sketch as Zip.\n zipInfo = zf.NameToInfo\n\n # Set general document info\n if DOCUMENT_JSON in zipInfo:\n fc = zf.read(DOCUMENT_JSON).decode(\"utf-8\")\n d = json.loads(fc)\n skf.document = SketchDocument(parent=skf, **d)\n else:\n return None # Cannot readw this file.\n\n # Set general user info\n if USER_JSON in zipInfo:\n fc = zf.read(USER_JSON).decode(\"utf-8\")\n d = json.loads(fc)\n skf.user = SketchUser(parent=skf, **d)\n\n # Read pages and build self.imagesId2Path dictionary, as we find sId-->name relations.\n for key in zipInfo:\n if key.startswith(PAGES_JSON): # This much be a page.\n fc = zf.read(key).decode(\"utf-8\")\n sketchPageInfo = json.loads(fc)\n # Reading pages/layers will find all docment images, and store them in self.imagesId2Path\n sketchPage = SketchPage(parent=skf, **sketchPageInfo)\n skf.pages[sketchPage.do_objectID] = sketchPage\n\n # Set general meta info\n if META_JSON in zipInfo:\n fc = zf.read(META_JSON).decode(\"utf-8\")\n d = json.loads(fc)\n skf.meta = SketchMeta(parent=skf, **d)\n\n # Find all images used in the file tree, so we can save them with their layer name.\n # Note that for now this is not a safe method, in case there are layers with\n # the same name in the document that refer to different bitmap files.\n # Also note that renaming the files in the _images/ folder, will disconnect them\n # from placements by bitmap layers.\n # TODO: Solve this later, creating unique file names.\n imageRefs = set()\n for image in skf.find(_class='bitmap'): # Recursively find all bitmap layers.\n imageBinary = zf.read(image.image._ref)\n # Save by internal name, that we already copied this image.\n imageRefs.add(image.image._ref)\n # If the image cannot be found by key, then use BitMap id as used in the file.\n # Export the image as separate file in _images directory.\n fbm = open(imagesPath + image.name + '.png', 'wb')\n fbm.write(imageBinary)\n fbm.close()\n\n # Now copy all remaining images (if not used in bitmap layer), under their own name.\n for key in zipInfo:\n if key.startswith(IMAGES_JSON) and key not in imageRefs:\n imageBinary = zf.read(key)\n fileName = key.split('/')[-1]\n fbm = open(imagesPath + fileName, 'wb')\n fbm.write(imageBinary)\n fbm.close()\n\n # Save any previews in the _images/ directory too.\n # Note that there may be an potential naming conflict here, in case a layer is called\n # \"preview\".\n # TODO: To be solved later.\n for key in zipInfo:\n if key.startswith(PREVIEWS_JSON): # This is a preview image\n previewBinary = zf.read(key)\n fp = open(imagesPath + key.split('/')[-1], 'wb') # Save in _images/ 
folder\n fp.write(previewBinary)\n fp.close()\n\n return skf", "def json_file():\r\n urlretrieve(URL, PATH)\r\n return PATH", "def json_file():\r\n urlretrieve(URL, PATH)\r\n return PATH", "def load(self, input):\n pass", "def _load_map(arg):\n\n if os.path.isfile(arg):\n output = {}\n with open(arg) as arg_f:\n for line in arg_f:\n key, val = line.strip().split('\\t')\n output[key] = val\n return output\n return json.loads(arg)", "def main():\n os.makedirs(PATH)\n fetch_data()\n convert_to_json(model_list, 'models.json', is_model=True)\n convert_to_json(backend_list, 'backends.json')\n convert_to_json(type_list, 'types.json')\n convert_to_json(featurizer_list, 'featurizers.json')", "def get_staged_images(self):\n with open(self.staging_path, \"r\") as f:\n return json.load(f)", "def do_json(pidx):\n status = \"200 OK\"\n if pidx == 0:\n name = f\"{BASEDIR}/scripts/__init__.py\"\n loader = importlib.machinery.SourceFileLoader(\"scripts\", name)\n spec = importlib.util.spec_from_loader(loader.name, loader)\n mod = importlib.util.module_from_spec(spec)\n loader.exec_module(mod)\n data = mod.data\n else:\n name = get_script_name(pidx)\n if not os.path.isfile(name):\n sys.stderr.write(f\"autoplot/meta 404 {name}\\n\")\n status = \"404 Not Found\"\n output = \"\"\n response_headers = [\n (\"Content-type\", \"application/json\"),\n (\"Content-Length\", str(len(output))),\n ]\n return output, status, response_headers\n try:\n timing = get_timing(pidx)\n except Exception:\n timing = -1\n loader = importlib.machinery.SourceFileLoader(f\"p{pidx}\", name)\n spec = importlib.util.spec_from_loader(loader.name, loader)\n mod = importlib.util.module_from_spec(spec)\n loader.exec_module(mod)\n data = mod.get_description()\n defaults = data.pop(\"defaults\", {\"_r\": \"t\", \"dpi\": \"100\"})\n data[\"maptable\"] = hasattr(mod, \"geojson\")\n data[\"highcharts\"] = hasattr(mod, \"highcharts\")\n data[\"timing[secs]\"] = timing\n\n # Setting to None disables\n if \"_r\" not in defaults or defaults[\"_r\"] is not None:\n data[\"arguments\"].append(\n dict(\n type=\"select\",\n options=FIGSIZES_NAMES,\n name=\"_r\",\n default=defaults.get(\"_r\", \"t\"),\n label=\"Image Pixel Size @100 DPI\",\n )\n )\n data[\"arguments\"].append(\n dict(\n type=\"int\",\n name=\"dpi\",\n default=defaults.get(\"dpi\", \"100\"),\n label=\"Image Resolution (DPI) (max 500)\",\n )\n )\n output = json.dumps(data)\n\n response_headers = [(\"Content-type\", \"application/json\")]\n return output, status, response_headers", "def deserialize_image(self, data, give_file_name):\r\n # Generate a random 8-character name\r\n # name = \"img_\" + self.generate_random_name() + \".png\"\r\n name = give_file_name + \".png\"\r\n file_path = os.path.join(self.temp_dir, name)\r\n img = Image.frombytes(data['mode'], data['size'], data['pixels'])\r\n img.save(file_path)\r\n return file_path", "def loader(path):\n img = np.load(path)\n img = img[1:4]\n if np.random.choice((True, False)):\n img = img[:, :, ::-1]\n img = np.array(img)\n if np.random.choice((True, False)):\n img = img[:, ::-1, :]\n img = np.array(img)\n\n img = img.transpose((1, 2, 0)) # pytorch is going to rotate it back\n return img", "def load_json_as_df(fldr, fname):\n \n \n split_fldr = os.path.join(fldr, 'data/splits')\n fpath = os.path.join(split_fldr, fname+'.json')\n tdf = pd.read_json(fpath, orient='index')\n tdf['image_name'] = tdf['image_path'] + '/' + tdf['image_name']\n tdf['species'] = tdf['genus'] + '_' + tdf['specific_epithet']\n \n img_fldr = os.path.join(fldr, 
'data/images/')\n list_image_paths = glob.glob(img_fldr+'*/*.JPG')\n list_image_paths = [x.replace(img_fldr,'') for x in list_image_paths]\n check_image_bool = tdf['image_name'].apply(lambda x: x in list_image_paths)\n tdf = tdf[check_image_bool]\n \n print('loaded {}.json with shape {}\\n'.format(fname, tdf.shape))\n\n return tdf", "def __init__(self, json):\n\n self.height = json[\"height\"]\n self.width = json[\"width\"]\n self.src = json[\"src\"]", "def main():\n import shutil\n import json\n\n if not os.path.isdir(args.cache):\n # creation dossier cache\n os.mkdir(args.cache)\n\n if not os.path.exists(args.cache+'/overviews.json'):\n # creation fichier overviews.json a partir d'un fichier ressource\n shutil.copy2(args.overviews, args.cache+'/overviews.json')\n\n with open(args.cache+'/overviews.json') as json_overviews:\n overviews_dict = json.load(json_overviews)\n if not (\"list_OPI\" in overviews_dict):\n overviews_dict[\"list_OPI\"] = []\n\n out_raster_srs = gdal.osr.SpatialReference()\n out_raster_srs.ImportFromEPSG(overviews_dict['crs']['code'])\n conn_string = \"PG:host=\"+host+\" dbname=\"+database+\" user=\"+user+\" password=\"+password\n db_graph = gdal.OpenEx(conn_string, gdal.OF_VECTOR)\n if db_graph is None:\n raise ValueError(\"Connection to database failed\")\n list_filename = glob.glob(args.input)\n if verbose > 0:\n print(len(list_filename), \"fichier(s) a traiter\")\n\n try:\n with open(args.cache+'/cache_mtd.json', 'r') as inputfile:\n mtd = json.load(inputfile)\n except:\n mtd = {}\n\n cliche_dejaTraites = []\n for filename in list_filename:\n cliche = Path(filename).stem\n \n if (cliche in overviews_dict['list_OPI']):\n # OPI déja traitée\n cliche_dejaTraites.append(cliche)\n else:\n print('nouvelle image: ', filename)\n color = [randrange(255), randrange(255), randrange(255)]\n while (color[0] in mtd) and (color[1] in mtd[color[0]]) and (color[2] in mtd[color[0]][color[1]]):\n color = [randrange(255), randrange(255), randrange(255)]\n if color[0] not in mtd:\n mtd[color[0]] = {}\n if color[1] not in mtd[color[0]]:\n mtd[color[0]][color[1]] = {}\n mtd[color[0]][color[1]][color[2]] = cliche\n process_image(overviews_dict, db_graph, filename, color, out_raster_srs)\n # on ajout l'OPI traitée a la liste\n overviews_dict[\"list_OPI\"].append(cliche)\n\n with open(args.cache+'/cache_mtd.json', 'w') as outfile:\n json.dump(mtd, outfile)\n\n with open(args.cache+'/overviews.json', 'w') as outfile:\n json.dump(overviews_dict, outfile)\n\n print(\"\\n\", len(list_filename) - len(cliche_dejaTraites),\"/\",len(list_filename),\"OPI(s) ajoutée(s)\")\n if len(cliche_dejaTraites) > 0:\n print(cliche_dejaTraites, \"déjà traitées : OPI non recalculée(s)\")", "def load_and_preprocess_image(path):\n image = tf.io.read_file(path)\n return preprocess_image(image)", "def import_file(self):\n self.inputdata = json.load(self.infile)\n self.outputdata = self.inputdata\n self.logger.info('Json file Loaded')\n self.logger.debug(u'JSON:{d}'.format(d=self.inputdata))", "def read_image_data(self):\n\n for sequence_name in self.sequence_name_list:\n sequence = self.sequences[sequence_name]\n for image_id in sequence.image_id_list:\n sequence.image_dict[image_id].image_path = '{}{}/{}'.format(self.root_dir, self.name, sequence.image_dict[image_id].filename)", "def _runParser(self):\n with open(self.var('filePath')) as f:\n return json.load(f)", "def load():\n\n #: the file passed by the user in the post request\n file = request.files[\"file\"]\n\n # ensure that file exists\n if file == None:\n 
return BadRequest(\"No file given\")\n\n # ensure that file is readable\n try:\n file = json.loads(file.read())\n except UnicodeDecodeError:\n return BadRequest(\"Invalid file\")\n \n # ensure that the file can be indexed\n try:\n points = file[\"points\"]\n reg_json = file[\"reg\"]\n except TypeError:\n return BadRequest(\"Invalid file\")\n\n global no_dimensions\n #: number of dimensions\n no_dimensions = file[\"no_dimensions\"]\n\n\n # give each point an annotation weight if it does not already have one\n for i in range(0, len(points)):\n if points[i].get(\"annot_weight\") == None:\n points[i][\"annot_weight\"] = random.uniform(0, 1)\n\n global reg \n # regression model loaded from file\n if not reg_json:\n reg = jsonpickle.loads(reg_json)\n\n global tsne \n tsne = points\n \n return {\"points\": points, \"reg\": reg != None, \"no_dimensions\": no_dimensions}", "def load_data(path):\r\n\r\n _, ftype = os.path.splitext(path) #get fname and extension\r\n\r\n if os.path.isfile(path):\r\n with open(path) as f:\r\n\r\n if ftype == \".json\" or ftype == \".geojson\": #handle json\r\n data = json.load(f)\r\n # print(data)\r\n return data\r\n\r\n elif ftype == \".csv\": #handle csv with csv reader\r\n with open(path, newline ='') as csvfile:\r\n data = csv.DictReader(csvfile)\r\n return list(data)\r\n\r\n else:\r\n print(\"neither json or csv\")\r\n return None", "def loading_strategy(self):\n try:\n if not self.file_allowed():\n raise Exception('File type {} is not allowed'.format(self.get_ext()))\n\n with open(self._file_path, 'r') as outfile:\n content = outfile.read()\n\n if self.is_json() or self.is_template():\n return json.loads(content)\n\n if self.is_yaml():\n if self._yaml_replacements:\n for key, value in self._yaml_replacements.iteritems():\n content = content.replace(key, value)\n\n return yaml.load(content)\n else:\n return content\n\n except Exception as e:\n Oprint.err(e)\n else:\n raise Exception('File type {} is not allowed'.format(self.get_ext()))", "def __init__(self):\n self.signs = None\n self.command = None\n with open('signs_path.json', 'r') as f:\n self.signs = json.load(f)\n\n for sign, path in self.signs.items():\n self.signs[sign] = cv2.imread(path)", "def loadOrRun(filename,function,*args):\r\n def loadJSON(filename):\r\n \"saves the data object as a JSON string\"\r\n with open(filename,\"r\") as openFile:\r\n data = json.loads(openFile.read())\r\n return data\r\n\r\n def saveJSON(filename,data):\r\n \"saves the data object as a JSON string\"\r\n with open(filename,\"w\") as openFile:\r\n openFile.write(json.dumps(data))\r\n try:\r\n data = loadJSON(filename)\r\n except IOError:\r\n data = function(*args)\r\n saveJSON(filename,data)\r\n\r\n return data", "def processed_json_path(path):\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')", "def process_files_json():\n # chdir into beep root\n pwd = os.getcwd()\n os.chdir(os.environ.get(\"BEEP_ROOT\", \"/\"))\n\n meta_list = list(filter(lambda x: '_Metadata.csv' in x, os.listdir(SRC_DIR)))\n file_list = list(filter(lambda x: '.csv' in x if x not in meta_list else None, os.listdir(SRC_DIR)))\n all_list = list(filter(lambda x: '.csv' in x, os.listdir(SRC_DIR)))\n\n all_list = sorted(all_list)\n dumpfn(all_list, \"all_files.json\")\n\n [file_id, mapdf] = init_map(PROJECT_NAME, DEST_DIR)\n\n new_file_index = file_id\n\n for filename in tqdm(sorted(file_list)):\n # If the file has already been renamed another entry should not be made\n if mapdf['filename'].str.contains(filename).sum() > 0:\n 
continue\n old_file = os.path.join(SRC_DIR, filename)\n new_path = os.path.join(DEST_DIR, PROJECT_NAME)\n shutil.copy(old_file, new_path) # copy main data file\n shutil.copy(old_file.replace(\".csv\", '_Metadata.csv'), new_path) # copy meta data file\n\n if PROJECT_NAME == 'FastCharge':\n [date, channel_no, strname, protocol] = get_parameters_fastcharge(filename, SRC_DIR)\n elif PROJECT_NAME == 'ClosedLoopOED':\n [date, channel_no, strname, protocol] = get_parameters_oed(filename, SRC_DIR)\n else:\n raise ValueError(\"Unsupported PROJECT_NAME: {}\".format(PROJECT_NAME))\n\n df_dup = mapdf.set_index(['protocol', 'date'])\n if (protocol, date) in df_dup.index:\n row = mapdf[(mapdf['protocol'] == protocol) & (mapdf['date'] == date)]\n file_id = row['fid'].iloc[0]\n protocol = row['protocol'].iloc[0]\n date = row['date'].iloc[0]\n strname = row['strname'].iloc[0]\n else:\n file_id = new_file_index\n new_file_index = new_file_index + 1\n\n new_name = \"{}_{}_{}\".format(PROJECT_NAME, f'{file_id:06}', channel_no)\n new_file = os.path.join(DEST_DIR, PROJECT_NAME, \"{}.csv\".format(new_name))\n\n new_row = pd.DataFrame([[file_id, protocol, channel_no, date, strname,\n os.path.abspath(old_file),\n os.path.abspath(new_file)]],\n columns=METADATA_COLUMN_NAMES)\n mapdf = mapdf.append(new_row)\n\n os.rename(os.path.join(DEST_DIR, PROJECT_NAME, filename), new_file)\n os.rename(os.path.join(DEST_DIR, PROJECT_NAME, filename).replace(\".csv\", \"_Metadata.csv\"),\n new_file.replace(\".csv\", \"_Metadata.csv\"))\n\n mapdf.to_csv(os.path.join(DEST_DIR, PROJECT_NAME, PROJECT_NAME + \"map.csv\"), index=False)\n mapdf = mapdf.reset_index(drop=True)\n os.chdir(pwd)\n return json.dumps(mapdf.to_dict(\"list\"))", "def loadData(self,data_file):\n #Load the data from the json\n with open(data_file) as json_file: \n data = json.load(json_file)\n\n # Clear all instance variables\n self.dirs = []\n self.files = {}\n self.X = []\n self.Y = []\n self.output = {}\n\n # stored the data into the instance variables\n self.dirs = data['dirs'] #good\n self.files = data['files'] # good\n \n # self.output is a dict() with string:np.array\n output = data['output']\n for e in output:\n self.output[e] = np.array(output[e]) # -> fine\n #self.X is a list of np.arrays\n X = data['X']\n for x in X:\n self.X.append(np.array(x))# -> fine\n #self.Y is a list of np.arrays\n Y = data['Y']\n for y in Y:\n self.Y.append(list(y))# -> fine\n #Test prints, uncomment to test if data looks correct\n #print('self.dirs = ' + str(self.dirs))\n #print()\n #print('self.files = ' + str(self.files))\n #print()\n #print('self.output = ' + str(self.output))\n #print()\n #print('self.X = ' + str(self.X))\n #print()\n #print('self.Y = ' + str(self.Y))\n #print()\n print('Preprocessed data loaded from ' + str(data_file))\n print(data['comment'])\n return", "def _remoteloadjson(path: str) -> JSONType:\n return json.loads(request.urlopen(path).read())", "def _load_eval(self, eval_path):\n with open(eval_path, 'r') as fb:\n images = list()\n setmap = {'0': set(), '1': set(), '2': set()}\n for line in fb.readlines():\n image, tag = line.split()\n setmap[tag].add(image)\n images.append(image)\n return images, setmap['0'], setmap['1'], setmap['2']", "def load_minimap(self):\n minimap_types = ['cover', 'fog']\n self.game_data[\"minimap\"] = {\"fog\": None, \"cover\": None}\n file_name = self.game_data[\"file_name\"].split(\".json\")[0]\n for minimap_type in minimap_types:\n file_name = f\"{file_name}-{minimap_type}.png\"\n self.game_data[\"minimap\"][minimap_type] = 
pg.image.load(\n path.join(self.saved_minimap, file_name)).convert_alpha()\n logger.info(\"Load the minimap %s\", file_name)", "def load_from_json(self, json_fp: str):\n # TODO:\n pass", "def main():\n with open(IMAGEPATH_LIST_PATH, \"rt\") as imagepath_list_handle:\n imagepath_list = [line.strip() for line in imagepath_list_handle.readlines()]\n\n object_detector = ObjectDetector(MODEL_PATH)\n\n dataset_json = []\n for imagepath in imagepath_list:\n image = scipy.misc.imread(imagepath)\n detections = object_detector.run(image)\n\n detections_json = {\"path\": imagepath, \"detections\": [det.to_dict() for det in detections]}\n dataset_json.append(detections_json)\n\n with open(DATASET_PATH, \"wt\") as json_handle:\n json.dump(dataset_json, json_handle, sort_keys=True, indent=4)", "def loadImage(name, size=0):\n path = os.path.join(PACKAGE_HOME, 'input', name)\n fd = open(path, 'rb')\n data = fd.read()\n fd.close()\n return data", "def Load(self, path):\n\n self.data = dict()\n\n if os.path.exists(self.cache_path):\n self.data = np.load(self.cache_path, allow_pickle=True)[()]\n\n if 'xlsx' in path:\n workBook = xlrd.open_workbook(path)\n sheet1_content1 = workBook.sheet_by_index(0)\n\n for i in tqdm(range(sheet1_content1.nrows)):\n Time = sheet1_content1.cell(i, 0).value\n Link = sheet1_content1.cell(i, 1).value\n Content = sheet1_content1.cell(i, 2).value\n\n id = Link[-16:]\n\n if not id in self.data:\n self.data[id] = dict()\n\n self.data[id]['time'] = Time\n self.data[id]['link'] = Link\n self.data[id]['post'] = Content\n\n elif 'json' in path:\n f = open(path, 'r', encoding='utf-8')\n text = f.read()\n data = json.loads(text)\n\n for v in data:\n id = v['link'][-16:]\n\n if not id in self.data:\n self.data[id] = dict()\n\n self.data[id]['time'] = v['Time']\n self.data[id]['address'] = v['address']\n self.data[id]['location'] = v['location']\n self.data[id]['post'] = v['post']\n self.data[id]['link'] = v['link']\n\n np.save(self.cache_path, self.data)", "def read(self):\n self.data = {}\n if path.isfile(self.json_file):\n with open(self.json_file) as data_file:\n self.data = json.load(data_file)\n data_file.close()\n if (self.custom_path and self.is_only\n and path.exists(self.custom_path)):\n self.data[\"icons_path\"].append(self.custom_path)\n self.check_paths()\n be_added = (len(self.data[\"icons_path\"]) > 0\n and len(self.data[\"app_path\"]) > 0)\n if be_added:\n self.dont_install = False\n if isinstance(self.data[\"icons\"], list):\n self.data[\"icons\"] = get_iterated_icons(self.data[\"icons\"])\n self.get_app_icons()", "def ingest_json_file(request):\n path = save_file(request) \n try:\n with open(path, encoding='utf-8') as f:\n data = json.loads(f.read())\n except Exception as e:\n log.error(log.exc(e))\n return None\n return data" ]
[ "0.60999244", "0.60512304", "0.5816437", "0.58107746", "0.5780941", "0.57445073", "0.5644108", "0.5631807", "0.56096995", "0.56022495", "0.55884826", "0.5587083", "0.5582107", "0.5529392", "0.55193985", "0.5491933", "0.54883915", "0.547414", "0.5427283", "0.54183984", "0.53994197", "0.5395166", "0.5394462", "0.53873545", "0.5387182", "0.53815347", "0.53576195", "0.5334842", "0.5321934", "0.5313884", "0.5310608", "0.5310608", "0.52981573", "0.5289559", "0.52790654", "0.5273125", "0.5260094", "0.5259582", "0.52574086", "0.52405864", "0.523915", "0.52323127", "0.5230542", "0.52273875", "0.5224637", "0.52211183", "0.5217093", "0.5216757", "0.51995873", "0.5194881", "0.51945394", "0.51860774", "0.5169386", "0.5165654", "0.515811", "0.5151825", "0.5151674", "0.51502556", "0.51488847", "0.5142326", "0.5140607", "0.5140607", "0.513651", "0.5134412", "0.51322585", "0.5130583", "0.5129482", "0.51253027", "0.5114443", "0.5114443", "0.51119924", "0.51119757", "0.5110627", "0.5107451", "0.510102", "0.50982684", "0.5098048", "0.5089106", "0.5087177", "0.5086778", "0.5085807", "0.5084216", "0.50798315", "0.50796825", "0.50695485", "0.5066782", "0.5061098", "0.5059396", "0.50564677", "0.50523883", "0.5052359", "0.5048568", "0.5048266", "0.5047723", "0.5047143", "0.5047063", "0.50465274", "0.5037718", "0.5036425", "0.5035316", "0.50290346" ]
0.0
-1
Save output from QuPath img import & processing function to JSON file
def save_co_registration_data_to_json(datasets: Dict[str, CoRegistrationData], output_file) -> str:\n to_json_tmp = {}\n for index, data in datasets.items():\n to_json_tmp[index] = {\n 'name': data.name,\n 'target_w': data.target_w,\n 'target_h': data.target_h,\n 'transform_matrix': data.transform_matrix.tolist(),\n 'moving_img_name': data.moving_img_name\n }\n\n with open(output_file, "w") as data_file:\n json.dump(to_json_tmp, data_file, indent=4, sort_keys=True)\n\n return json.dumps(to_json_tmp, indent=4, sort_keys=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __make_processing(self, img_name, abspath_dir_img, id_foot):\n data = {}\n data['data'] = ImageInfo.get_date(abspath_dir_img)\n data['total_part'] = TOTAL_PART\n data['nuvens'] = ImageInfo.get_cloud(abspath_dir_img)\n self.__make_tms(abspath_dir_img)\n data['geom'] = self.__make_footprint(abspath_dir_img, shp_out=id_foot)\n abspath_rgb, img_name_rgb = ImageInfo.get_image_rgb(\n abspath_dir_img, img_name\n )\n data['tms'] = ImageInfo.get_xml_tms(img_name_rgb)\n data['image'] = img_name_rgb\n data['quicklook'] = self.__make_png(abspath_rgb)\n data['path'] = ImageInfo.get_path(img_name)\n return data", "def create_json(self):\n data = {\"image_id\": self.ids, \"img_path\": self.img_paths, \"bg\": self.bgs}\n if hasattr(self, \"bbox\"):\n data[\"bbox\"] = self.bbox\n if hasattr(self, \"masks\"):\n data[\"masks\"] = self.masks\n with open(f\"{self.save_path}{self.name}/json/images_info.json\", \"w\") as f:\n json.dump(data, f)", "def create_image_annot_json():\n filepath = 'dataset/image_data.json'\n img_list = json.load(open(filepath))\n result = dict()\n for img in img_list:\n vis_id = img['image_id']\n result[vis_id] = img\n\n dest_file_path = 'dataset/vis_image_annt.json'\n with open(dest_file_path, 'w') as fp:\n json.dump(result, fp)\n print(\"DONE! - Generated \" + dest_file_path)", "def process(self, image):", "def _update(self):\n print(\"Saving prediction json files...\")\n self._dump_json()\n print(\"Saving prediction json files done...\")\n print(\"Saving prediction images...\")\n self._dump_image()\n print(\"Saving prediction images done...\")", "def process_image(self):\n pass", "def run_json(self, plot_generator):\n\n # Run the script\n\n image_data, metadata = plot_generator.go()\n\n # Encode for http send\n encoded_image = base64.b64encode(image_data)\n\n # convert to json\n data = json.dumps({'data': encoded_image,\n 'metadata': metadata})\n \n\n\n\n # Write response\n self.wfile.write(data)", "def _preprocess(self):\n print(\"Note: if root path is changed, the previously generated json files need to be re-generated (delete them first)\")\n if osp.exists(self.imgs_labeled_dir) and \\\n osp.exists(self.imgs_detected_dir) and \\\n osp.exists(self.split_classic_det_json_path) and \\\n osp.exists(self.split_classic_lab_json_path) and \\\n osp.exists(self.split_new_det_json_path) and \\\n osp.exists(self.split_new_lab_json_path):\n return\n\n mkdir_if_missing(self.imgs_detected_dir)\n mkdir_if_missing(self.imgs_labeled_dir)\n\n print(\"Extract image data from {} and save as png\".format(self.raw_mat_path))\n mat = h5py.File(self.raw_mat_path, 'r')\n\n def _deref(ref):\n return mat[ref][:].T\n\n def _process_images(img_refs, campid, pid, save_dir):\n img_paths = [] # Note: some persons only have images for one view\n for imgid, img_ref in enumerate(img_refs):\n img = _deref(img_ref)\n # skip empty cell\n if img.size == 0 or img.ndim < 3: continue\n # images are saved with the following format, index-1 (ensure uniqueness)\n # campid: index of camera pair (1-5)\n # pid: index of person in 'campid'-th camera pair\n # viewid: index of view, {1, 2}\n # imgid: index of image, (1-10)\n viewid = 1 if imgid < 5 else 2\n img_name = '{:01d}_{:03d}_{:01d}_{:02d}.png'.format(campid+1, pid+1, viewid, imgid+1)\n img_path = osp.join(save_dir, img_name)\n imageio.imwrite(img_path, img)\n img_paths.append(img_path)\n return img_paths\n\n def _extract_img(name):\n print(\"Processing {} images (extract and save) ...\".format(name))\n meta_data = []\n imgs_dir = self.imgs_detected_dir if name == 
'detected' else self.imgs_labeled_dir\n for campid, camp_ref in enumerate(mat[name][0]):\n camp = _deref(camp_ref)\n num_pids = camp.shape[0]\n for pid in range(num_pids):\n img_paths = _process_images(camp[pid,:], campid, pid, imgs_dir)\n assert len(img_paths) > 0, \"campid{}-pid{} has no images\".format(campid, pid)\n meta_data.append((campid+1, pid+1, img_paths))\n print(\"done camera pair {} with {} identities\".format(campid+1, num_pids))\n return meta_data\n\n meta_detected = _extract_img('detected')\n meta_labeled = _extract_img('labeled')\n\n def _extract_classic_split(meta_data, test_split):\n train, test = [], []\n num_train_pids, num_test_pids = 0, 0\n num_train_imgs, num_test_imgs = 0, 0\n for i, (campid, pid, img_paths) in enumerate(meta_data):\n \n if [campid, pid] in test_split:\n for img_path in img_paths:\n camid = int(osp.basename(img_path).split('_')[2])\n test.append((img_path, num_test_pids, camid))\n num_test_pids += 1\n num_test_imgs += len(img_paths)\n else:\n for img_path in img_paths:\n camid = int(osp.basename(img_path).split('_')[2])\n train.append((img_path, num_train_pids, camid))\n num_train_pids += 1\n num_train_imgs += len(img_paths)\n return train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs\n\n print(\"Creating classic splits (# = 20) ...\")\n splits_classic_det, splits_classic_lab = [], []\n for split_ref in mat['testsets'][0]:\n test_split = _deref(split_ref).tolist()\n\n # create split for detected images\n train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs = \\\n _extract_classic_split(meta_detected, test_split)\n splits_classic_det.append({\n 'train': train, 'query': test, 'gallery': test,\n 'num_train_pids': num_train_pids, 'num_train_imgs': num_train_imgs,\n 'num_query_pids': num_test_pids, 'num_query_imgs': num_test_imgs,\n 'num_gallery_pids': num_test_pids, 'num_gallery_imgs': num_test_imgs,\n })\n\n # create split for labeled images\n train, num_train_pids, num_train_imgs, test, num_test_pids, num_test_imgs = \\\n _extract_classic_split(meta_labeled, test_split)\n splits_classic_lab.append({\n 'train': train, 'query': test, 'gallery': test,\n 'num_train_pids': num_train_pids, 'num_train_imgs': num_train_imgs,\n 'num_query_pids': num_test_pids, 'num_query_imgs': num_test_imgs,\n 'num_gallery_pids': num_test_pids, 'num_gallery_imgs': num_test_imgs,\n })\n \n write_json(splits_classic_det, self.split_classic_det_json_path)\n write_json(splits_classic_lab, self.split_classic_lab_json_path)\n\n def _extract_set(filelist, pids, pid2label, idxs, img_dir, relabel):\n tmp_set = []\n unique_pids = set()\n for idx in idxs:\n img_name = filelist[idx][0]\n camid = int(img_name.split('_')[2])\n pid = pids[idx]\n if relabel: pid = pid2label[pid]\n img_path = osp.join(img_dir, img_name)\n tmp_set.append((img_path, int(pid), camid))\n unique_pids.add(pid)\n return tmp_set, len(unique_pids), len(idxs)\n\n def _extract_new_split(split_dict, img_dir):\n train_idxs = split_dict['train_idx'].flatten() - 1 # index-0\n pids = split_dict['labels'].flatten()\n train_pids = set(pids[train_idxs])\n pid2label = {pid: label for label, pid in enumerate(train_pids)}\n query_idxs = split_dict['query_idx'].flatten() - 1\n gallery_idxs = split_dict['gallery_idx'].flatten() - 1\n filelist = split_dict['filelist'].flatten()\n train_info = _extract_set(filelist, pids, pid2label, train_idxs, img_dir, relabel=True)\n query_info = _extract_set(filelist, pids, pid2label, query_idxs, img_dir, relabel=False)\n gallery_info = 
_extract_set(filelist, pids, pid2label, gallery_idxs, img_dir, relabel=False)\n return train_info, query_info, gallery_info\n\n print(\"Creating new splits for detected images (767/700) ...\")\n train_info, query_info, gallery_info = _extract_new_split(\n loadmat(self.split_new_det_mat_path),\n self.imgs_detected_dir,\n )\n splits = [{\n 'train': train_info[0], 'query': query_info[0], 'gallery': gallery_info[0],\n 'num_train_pids': train_info[1], 'num_train_imgs': train_info[2],\n 'num_query_pids': query_info[1], 'num_query_imgs': query_info[2],\n 'num_gallery_pids': gallery_info[1], 'num_gallery_imgs': gallery_info[2],\n }]\n write_json(splits, self.split_new_det_json_path)\n\n print(\"Creating new splits for labeled images (767/700) ...\")\n train_info, query_info, gallery_info = _extract_new_split(\n loadmat(self.split_new_lab_mat_path),\n self.imgs_labeled_dir,\n )\n splits = [{\n 'train': train_info[0], 'query': query_info[0], 'gallery': gallery_info[0],\n 'num_train_pids': train_info[1], 'num_train_imgs': train_info[2],\n 'num_query_pids': query_info[1], 'num_query_imgs': query_info[2],\n 'num_gallery_pids': gallery_info[1], 'num_gallery_imgs': gallery_info[2],\n }]\n write_json(splits, self.split_new_lab_json_path)", "def _save(self):\n\n out_dict = {}\n out_dict[\"version\"] = pyfx.__version__\n out_dict[\"name\"] = self._name\n out_dict[\"src\"] = self._src\n\n # Write out the background file as an image\n bg_file = os.path.join(self._name,\"master_bg_image.png\")\n pyfx.util.to_file(self._bg_frame,bg_file)\n out_dict[\"bg_frame\"] = bg_file\n\n f = open(os.path.join(self._name,\"pyfx.json\"),\"w\")\n json.dump(out_dict,f)\n f.close()", "def __call__(self, results):\r\n if isinstance(results['img'], str):\r\n results['filename'] = results['img']\r\n results['ori_filename'] = results['img']\r\n else:\r\n results['filename'] = None\r\n results['ori_filename'] = None\r\n img = mmcv.imread(results['img'])\r\n results['img'] = img\r\n results['img_fields'] = ['img']\r\n results['img_shape'] = img.shape\r\n results['ori_shape'] = img.shape\r\n return results", "def _dump_json(self):\n if not self._current_id == len(self._img_ids):\n warnings.warn(\n 'Recorded {} out of {} validation images, incomplete results'.format(\n self._current_id, len(self._img_ids)))\n try:\n with open(self._filename, 'w') as f:\n json.dump(self._results, f)\n except IOError as e:\n raise RuntimeError(\"Unable to dump json file, ignored. 
What(): {}\".format(str(e)))", "def main():\n with open(IMAGEPATH_LIST_PATH, \"rt\") as imagepath_list_handle:\n imagepath_list = [line.strip() for line in imagepath_list_handle.readlines()]\n\n object_detector = ObjectDetector(MODEL_PATH)\n\n dataset_json = []\n for imagepath in imagepath_list:\n image = scipy.misc.imread(imagepath)\n detections = object_detector.run(image)\n\n detections_json = {\"path\": imagepath, \"detections\": [det.to_dict() for det in detections]}\n dataset_json.append(detections_json)\n\n with open(DATASET_PATH, \"wt\") as json_handle:\n json.dump(dataset_json, json_handle, sort_keys=True, indent=4)", "def process(image):\n pass", "def write(self, path):\n\n annotation = copy.deepcopy(self.annotation)\n\n for image_info in annotation['images']:\n image_info['file_name'] = os.path.relpath(image_info['file_name'],\n os.path.dirname(path))\n\n with open(path, 'w') as read_file:\n json.dump(annotation, read_file)", "def get_output_json(self, case_path):\r\n if not os.path.exists(case_path):\r\n logging.ERROR('the path of source files does not exist')\r\n else:\r\n self.case_path = os.path.abspath(case_path)\r\n self.case_json = os.path.join(self.case_path, 'output.json')\r\n self.case_image = os.path.join(self.case_path, 'images')\r\n self.num_name = os.path.abspath(self.case_path).split(sep='\\\\')[-2]\r\n self.chi_name = IdToChinese[self.num_name]\r\n\r\n with io.open(self.case_json, 'r', encoding='utf-8') as f:\r\n json_data = json.load(f)\r\n self.audioResult = json_data['data']['audioResult']\r\n self.docs = self.audioResult['docs']\r\n self.classify_four_w= self.audioResult['4W']\r\n self.approval_information = self.audioResult['approval_information']\r\n return True", "def process_data(output_folder):\n # select imgs\n img_folder = join(output_folder, 'img')\n select_img(output_folder, img_folder, 'HE-green')\n\n mask_folder = join(output_folder, 'mask')\n select_img(output_folder, mask_folder, '_EF5')", "def getimgs():", "def run(self, input_path, output_path):\n # read in data\n try:\n image = Image.open(input_path)\n except Exception:\n raise ValueError(\"invalid image file\")\n \n # data preprocessing\n img = self.preprocess(image)\n \n # perform inference\n output = self.model(img)\n \n # post process\n results = self.postprocess(output)\n \n # save output\n results = {'results': results}\n\n with open(output_path, 'w') as out:\n json.dump(results, out)", "def store_result(arr):\r\n title = arr['Title'].replace('/', '-')\r\n # create directory\r\n if not os.path.exists(title):\r\n os.makedirs(title)\r\n\r\n with open('%s/%s.json' % (title, title), 'w') as f:\r\n f.write(json.dumps(arr))\r\n f.close()\r\n\r\n # images download\r\n images = arr['Photo']\r\n for image in images:\r\n filename = image['src'].split('/')[-1]\r\n file_path = os.path.join(title, filename)\r\n r = requests.get(image['src'], allow_redirects=True)\r\n open(file_path, 'wb').write(r.content)", "def result2json(ifilename, poiname, ofilename):\n nameMap = {\n \"SysWeight1\" : \"mc\",\n \"SysWeight2\" : \"FSR\",\n \"SysWeight3\" : \"bkg\",\n \"SysWeight4\" : \"tagpt\",\n \"SysWeight6\" : \"Prefire\",\n \"SysRecoil2\" : \"recoil_eta\",\n \"SysRecoil3\" : \"recoil_keys\",\n \"SysRecoil6\" : \"recoil_stat0\",\n \"SysRecoil7\" : \"recoil_stat1\",\n \"SysRecoil8\" : \"recoil_stat2\",\n \"SysRecoil9\" : \"recoil_stat3\",\n \"SysRecoil10\": \"recoil_stat4\",\n \"SysRecoil11\": \"recoil_stat5\",\n \"SysRecoil12\": \"recoil_stat6\",\n \"SysRecoil13\": \"recoil_stat7\",\n \"SysRecoil14\": 
\"recoil_stat8\",\n \"SysRecoil15\": \"recoil_stat9\",\n }\n\n def getNuisName(nuis):\n if nuis in nameMap.keys():\n return nameMap[nuis]\n elif bool(re.match(r\"\\w*bin\\d+shape\", nuis)):\n return \"QCD_\" + nuis\n else:\n return nuis\n\n ifile = ROOT.TFile(ifilename)\n himpact = ifile.Get(\"nuisance_impact_mu\")\n himpact_grouped = ifile.Get(\"nuisance_group_impact_mu\")\n tree = ifile.Get(\"fitresults\")\n tree.GetEntry(0)\n\n # find the POI bin for poiname\n ibinX = -1\n for binX in range(1, himpact.GetNbinsX()+1):\n poi = himpact.GetXaxis().GetBinLabel(binX)\n if poi == poiname:\n ibinX = binX\n continue\n assert ibinX >=0, \"Can not find the POI {} in the postfit file {}. Please check.\".format(poiname, ifilename)\n\n results = OrderedDict()\n results['POIs'] = []\n val = getattr(tree, poiname)\n err = abs(getattr(tree, poiname+\"_err\"))\n poi = OrderedDict()\n poi['fit'] = [val-err, val, val+err]\n poi['name'] = poiname\n results['POIs'].append(poi)\n\n results['method'] = 'default'\n results['params'] = []\n\n # dump impacts\n impacts = OrderedDict()\n for ibinY in range(1, himpact.GetNbinsY()+1):\n nuis = himpact.GetYaxis().GetBinLabel(ibinY)\n impacts[nuis] = himpact.GetBinContent(ibinX, ibinY)\n\n # add the grouped QCD and Recoil systematic\n groupnames = []\n for ibinY in range(1, himpact_grouped.GetNbinsY()+1):\n tmpY = himpact_grouped.GetYaxis().GetBinLabel(ibinY)\n if tmpY == 'stat':\n continue\n impacts[tmpY] = himpact_grouped.GetBinContent(ibinX, ibinY)\n groupnames.append(tmpY)\n\n # sort impacts, descending\n impacts = OrderedDict(sorted(impacts.items(), key=lambda x: abs(x[1]), reverse=True))\n\n pulls = OrderedDict()\n for nuis in impacts.keys():\n if nuis not in groupnames:\n val = getattr(tree, nuis)\n err = getattr(tree, nuis+\"_err\")\n err = abs(err)\n else:\n # manually set the postfit of the grouped sys to [-1,1], and pulled at 0,\n # since only the impacts are useful to us\n val = 0.\n err = 1.\n pulls[nuis] = [val - err, val, val + err]\n\n # save to results\n for nuis in impacts.keys():\n systematic = OrderedDict()\n systematic['fit'] = pulls[nuis]\n systematic['groups'] = []\n systematic['impact_' + poiname] = impacts[nuis]\n systematic['name'] = getNuisName(nuis)\n systematic['prefit'] = [-1.0, 0., 1.0]\n systematic[poiname] = [poi['fit'][1] - impacts[nuis], poi['fit'][1], poi['fit'][1] + impacts[nuis]]\n systematic['type'] = \"Gaussian\"\n print(getNuisName(nuis), pulls[nuis][1], pulls[nuis][1]-pulls[nuis][0], impacts[nuis])\n\n results['params'].append(systematic)\n\n with open(ofilename, 'w') as fp:\n json.dump(results, fp, indent=2)", "def process(self):\n return self.output_image", "def run():\n\n today = datetime.now().strftime(\"%Y-%m-%d\")\n log_file = os.path.abspath(\"logs/{}.log\".format(today))\n logger = RsmasLogger(\"pipeline\", log_file)\n\n images = get_list_of_images()\n # LOG: list of images to process\n logger.log(loglevel.INFO, [img.key for img in images])\n\n for im in images:\n\n logger.log(loglevel.INFO, \"Processing image: {}\".format(im.key))\n\n file_path = \"{}/{}\".format(im.bucket_name, im.key)\n full_path = \"{}_full.jpg\"\n mod_path = \"{}_mod.jpg\"\n aws_path = \"{}/{}/{}/{}\"\n try:\n haz_id, haz_name, sat_name, sat_dir, img_type, img_date, center = summary.pull_summary_data(\n \"/vsis3/{}\".format(file_path))\n sat_id = Satellite.from_params(sat_name, bool(sat_dir))\n except:\n # LOG: error in image metadata format\n logger.log(loglevel.ERROR, '\\tThere was an error in the metadata format of the image. 
Skipping.')\n continue\n\n aws_path = aws_path.format(haz_id, sat_id, img_type, img_date)\n full_path = full_path.format(img_date)\n mod_path = mod_path.format(img_date)\n\n # 1. Read in image file\n with rasterio.open(\"s3://{}\".format(file_path)) as data:\n band = data.read(1)\n img = plot.show(band)\n img.get_figure().savefig(full_path, dpi=300)\n\n # 3. Compress image\n compressed = immanip.compress_image(full_path, compression_amount=0.3)\n\n # 4 - 5. Pad image and add date on image\n text_image = immanip.add_text_to_image(compressed, img_date)\n\n # 6. Save image locally\n text_image.save(mod_path.format(img_date))\n mod_path_aws = save.get_s3_url(\"{}/{}\".format(aws_path, mod_path))\n full_path_aws = save.get_s3_url(\"{}/{}\".format(aws_path, full_path))\n\n tif_path_aws = save.get_s3_url(\"{}/{}\".format(aws_path, im.key))\n\n # LOG: images successfully moved to S3 bucket\n # LOG: mod_path_aws, full_path_aws, tif_path_aws\n\n hazard = Hazard(haz_id, haz_name, HazardType.VOLCANO, Location(center[0], center[1]), Date(img_date), 0)\n satellite = Satellite.from_params(sat_name, bool(sat_dir))\n image = Image(str(randint(1, 10000000)),\n haz_id,\n satellite,\n ImageType.from_string(img_type),\n Date(img_date),\n ImageURL(full_path_aws),\n ImageURL(tif_path_aws),\n ImageURL(mod_path_aws))\n\n try:\n db = Database()\n except ConnectionError:\n logger.log(loglevel.ERROR, \"\\tThere was an error while connecting to the database. Skipping this image.\")\n continue\n\n db.create_new_hazard(hazard)\n db.create_new_satellite(satellite)\n db.create_new_image(image)\n\n db.close()\n\n # LOG: database successfully updated\n logger.log(loglevel.INFO, \"\\tDatabase succesfully updated.\")\n\n save.save_image_s3(mod_path, \"{}/{}\".format(aws_path, mod_path))\n save.save_image_s3(full_path, \"{}/{}\".format(aws_path, full_path))\n save.move_tif(im.key, \"{}/{}\".format(aws_path, im.key))\n\n logger.log(loglevel.INFO, \"\\tImages were successfully uploaded to the S3 bucket\")\n logger.log(loglevel.INFO, \"\\t\\tmod_path_aws: {}\".format(mod_path_aws))\n logger.log(loglevel.INFO, \"\\t\\tfull_path_aws: {}\".format(full_path_aws))\n logger.log(loglevel.INFO, \"\\t\\ttif_path_aws: {}\".format(tif_path_aws))\n\n # LOG: image completed\n logger.log(loglevel.INFO, \"\\tProcessing of {} completed.\".format(im.key))\n\n # LOG: finished processing images\n logger.log(loglevel.INFO, \"Processing complete.\")", "def repair_json(in_file, out_file, path_to_img):\n with open(in_file, 'rb') as f:\n data = f.readlines()\n\n # delete all \\n characters\n data = map(lambda x: x.rstrip(), data)\n\n # make one JSON object per row\n json_rows = []\n start = -1\n while True:\n try:\n start = data.index('{', start + 1)\n end = data.index('}', start)\n row = ''.join(data[start:end+1])\n row = re.sub(\"\\\"Bmp\\\"\", \"\\\"Image\\\"\", row)\n row = re.sub(\".bmp\", \".jpg\", row)\n json_rows.append(row)\n start = end\n except ValueError:\n break\n\n # join all JSON objects into one comme delimited string enclosed in square brackets\n data_join_str = \"[\" + ','.join(json_rows) + \"]\"\n\n # create JSON object\n repaired_json = json.loads(data_join_str)\n for x in repaired_json:\n f = os.path.join(path_to_img, x['Image'])\n print x['Image']\n if not os.path.isfile(f):\n repaired_json.remove(x)\n # remove out_file if it exists\n try:\n os.remove(out_file)\n except OSError:\n pass\n except IOError:\n pass\n print 'dumping...'\n with open(out_file, 'w+') as f:\n json.dump(repaired_json, f)", "def main():\n\n\n\n skulls_folder = 
os.listdir(RAW_IMAGE_DIRECTORY)\n\n # fetch and sort the .mnc and .tag files\n mnc_files = [f for f in skulls_folder if 'mnc' in f]\n tag_files = [f for f in skulls_folder if 'tag' in f]\n mnc_names = [i.split('.mnc')[0] for i in mnc_files]\n \n mnc_files.sort()\n tag_files.sort()\n mnc_names.sort()\n\n # Process and package ndarrays as tuples inside npy file\n package_to_npy(RAW_IMAGE_DIRECTORY, mnc_files, tag_files, mnc_names)\n \n print('\\n' * 5)\n\n # Push the npy files to GCP Cloud Storage\n upload_to_gcp(PROCESSED_IMAGE_DIRECTORY, GCP_PROJECT_NAME, GCP_BUCKET_NAME)", "def save_result(res, name):\n with open('dist/'+name+'.json','w') as fp:\n json.dump(res, fp)", "def proc_fid(out, fid):\n img = imageHash.get(fid, [])\n if len(img) == 2:\n out[img[\"mime\"]] = (img[\"img\"], fid)\n return out", "def test_save_images(self):\n save_file(self.quart.save_images, to_single_file=False)", "def pilimg_to_json(val):\n return \"TODO\"", "def output_to_cwl_json(\n galaxy_output, get_metadata, get_dataset, get_extra_files, pseduo_location=False,\n):\n def element_to_cwl_json(element):\n element_output = GalaxyOutput(\n galaxy_output.history_id,\n element[\"object\"][\"history_content_type\"],\n element[\"object\"][\"id\"],\n )\n return output_to_cwl_json(element_output, get_metadata, get_dataset, get_extra_files)\n\n output_metadata = get_metadata(galaxy_output.history_content_type, galaxy_output.history_content_id)\n\n def dataset_dict_to_json_content(dataset_dict):\n if \"content\" in dataset_dict:\n return json.loads(dataset_dict[\"content\"])\n else:\n with open(dataset_dict[\"path\"]) as f:\n return json.load(f)\n\n if output_metadata[\"history_content_type\"] == \"dataset\":\n ext = output_metadata[\"file_ext\"]\n assert output_metadata[\"state\"] == \"ok\"\n if ext == \"expression.json\":\n dataset_dict = get_dataset(output_metadata)\n return dataset_dict_to_json_content(dataset_dict)\n else:\n file_or_directory = \"Directory\" if ext == \"directory\" else \"File\"\n if file_or_directory == \"File\":\n dataset_dict = get_dataset(output_metadata)\n properties = output_properties(pseduo_location=pseduo_location, **dataset_dict)\n basename = properties[\"basename\"]\n extra_files = get_extra_files(output_metadata)\n found_index = False\n for extra_file in extra_files:\n if extra_file[\"class\"] == \"File\":\n path = extra_file[\"path\"]\n if path == SECONDARY_FILES_INDEX_PATH:\n found_index = True\n\n if found_index:\n ec = get_dataset(output_metadata, filename=SECONDARY_FILES_INDEX_PATH)\n index = dataset_dict_to_json_content(ec)\n for basename in index[\"order\"]:\n for extra_file in extra_files:\n if extra_file[\"class\"] == \"File\":\n path = extra_file[\"path\"]\n if path == os.path.join(SECONDARY_FILES_EXTRA_PREFIX, basename):\n ec = get_dataset(output_metadata, filename=path)\n if not STORE_SECONDARY_FILES_WITH_BASENAME:\n ec[\"basename\"] = basename + os.path.basename(path)\n else:\n ec[\"basename\"] = os.path.basename(path)\n ec_properties = output_properties(pseduo_location=pseduo_location, **ec)\n if \"secondaryFiles\" not in properties:\n properties[\"secondaryFiles\"] = []\n\n properties[\"secondaryFiles\"].append(ec_properties)\n else:\n basename = output_metadata.get(\"cwl_file_name\")\n if not basename:\n basename = output_metadata.get(\"name\")\n\n listing = []\n properties = {\n \"class\": \"Directory\",\n \"basename\": basename,\n \"listing\": listing,\n }\n\n extra_files = get_extra_files(output_metadata)\n for extra_file in extra_files:\n if extra_file[\"class\"] == 
\"File\":\n path = extra_file[\"path\"]\n ec = get_dataset(output_metadata, filename=path)\n ec[\"basename\"] = os.path.basename(path)\n ec_properties = output_properties(pseduo_location=pseduo_location, **ec)\n listing.append(ec_properties)\n\n return properties\n\n elif output_metadata[\"history_content_type\"] == \"dataset_collection\":\n if output_metadata[\"collection_type\"] == \"list\":\n rval = []\n for element in output_metadata[\"elements\"]:\n rval.append(element_to_cwl_json(element))\n elif output_metadata[\"collection_type\"] == \"record\":\n rval = {}\n for element in output_metadata[\"elements\"]:\n rval[element[\"element_identifier\"]] = element_to_cwl_json(element)\n return rval\n else:\n raise NotImplementedError(\"Unknown history content type encountered\")", "def getimage(self):", "def add_img_array(data):\n data['img_array'] = data['url'].map(input_processing)\n data.to_hdf('reddit_img.h5', key='data')", "def run(self, verbose=False):\n from utils import write_to_file # function to write json to file\n self.read_json()\n graph = self.parse_jsons()\n json = self.pipe_vl2vg(graph)\n return self.write_to_file(rawinput=json, filetype='json', output_path=self.output_path, engine_name=self.engine_name, algorithm_name=self.algorithm_name, suffix=self.file_suffix, verbose=verbose)", "def add_processed_image(image_proc_type, name, b64_string, export_file_type):\n\n if image_proc_type == \"contrast stretching\":\n info = process_contrast_stretch(name, b64_string, export_file_type)\n metrics_list = list(info[4])\n num_pixels = metrics_list[0]\n x_coord = metrics_list[1]\n y_coord = metrics_list[2]\n avg_value = metrics_list[3]\n metrics_output = [num_pixels, x_coord, y_coord, avg_value]\n info[6] = info[6].decode(\"utf-8\")\n add_file(info[0], info[1], info[2], info[3], metrics_output, info[6])\n logging.info('Image processed with contrast stretching')\n\n if image_proc_type == \"adaptive equalization\":\n info = process_adapt_equalization(name, b64_string, export_file_type)\n metrics_list = list(info[4])\n num_pixels = metrics_list[0]\n x_coord = metrics_list[1]\n y_coord = metrics_list[2]\n avg_value = metrics_list[3]\n metrics_output = [num_pixels, x_coord, y_coord, avg_value]\n info[6] = info[6].decode(\"utf-8\")\n add_file(info[0], info[1], info[2], info[3], metrics_output, info[6])\n logging.info('Image processed with adaptive equalization')\n\n if image_proc_type == \"histogram equalization\":\n info = process_histogram_equalization(name, b64_string, export_file_type)\n metrics_list = list(info[4])\n num_pixels = metrics_list[0]\n x_coord = metrics_list[1]\n y_coord = metrics_list[2]\n avg_value = metrics_list[3]\n metrics_output = [num_pixels, x_coord, y_coord, avg_value]\n info[6] = info[6].decode(\"utf-8\")\n add_file(info[0], info[1], info[2], info[3], metrics_output, info[6])\n logging.info('Image processed with histogram equalization')\n\n if image_proc_type == \"reverse video\":\n info = process_reverse_image(name, b64_string, export_file_type)\n metrics_list = list(info[4])\n num_pixels = metrics_list[0]\n x_coord = metrics_list[1]\n y_coord = metrics_list[2]\n avg_value = metrics_list[3]\n metrics_output = [num_pixels, x_coord, y_coord, avg_value]\n info[6] = info[6].decode(\"utf-8\")\n add_file(info[0], info[1], info[2], info[3], metrics_output, info[6])\n logging.info('Image processed with reverse image')\n\n if image_proc_type == \"log compression\":\n info = process_log_compression(name, b64_string, export_file_type)\n metrics_list = list(info[4])\n num_pixels 
= metrics_list[0]\n x_coord = metrics_list[1]\n y_coord = metrics_list[2]\n avg_value = metrics_list[3]\n metrics_output = [num_pixels, x_coord, y_coord, avg_value]\n info[6] = info[6].decode(\"utf-8\")\n add_file(info[0], info[1], info[2], info[3], metrics_output, info[6])\n logging.info('Image processed with log compression')\n\n return jsonify(\"it worked\")", "def do_json(pidx):\n status = \"200 OK\"\n if pidx == 0:\n name = f\"{BASEDIR}/scripts/__init__.py\"\n loader = importlib.machinery.SourceFileLoader(\"scripts\", name)\n spec = importlib.util.spec_from_loader(loader.name, loader)\n mod = importlib.util.module_from_spec(spec)\n loader.exec_module(mod)\n data = mod.data\n else:\n name = get_script_name(pidx)\n if not os.path.isfile(name):\n sys.stderr.write(f\"autoplot/meta 404 {name}\\n\")\n status = \"404 Not Found\"\n output = \"\"\n response_headers = [\n (\"Content-type\", \"application/json\"),\n (\"Content-Length\", str(len(output))),\n ]\n return output, status, response_headers\n try:\n timing = get_timing(pidx)\n except Exception:\n timing = -1\n loader = importlib.machinery.SourceFileLoader(f\"p{pidx}\", name)\n spec = importlib.util.spec_from_loader(loader.name, loader)\n mod = importlib.util.module_from_spec(spec)\n loader.exec_module(mod)\n data = mod.get_description()\n defaults = data.pop(\"defaults\", {\"_r\": \"t\", \"dpi\": \"100\"})\n data[\"maptable\"] = hasattr(mod, \"geojson\")\n data[\"highcharts\"] = hasattr(mod, \"highcharts\")\n data[\"timing[secs]\"] = timing\n\n # Setting to None disables\n if \"_r\" not in defaults or defaults[\"_r\"] is not None:\n data[\"arguments\"].append(\n dict(\n type=\"select\",\n options=FIGSIZES_NAMES,\n name=\"_r\",\n default=defaults.get(\"_r\", \"t\"),\n label=\"Image Pixel Size @100 DPI\",\n )\n )\n data[\"arguments\"].append(\n dict(\n type=\"int\",\n name=\"dpi\",\n default=defaults.get(\"dpi\", \"100\"),\n label=\"Image Resolution (DPI) (max 500)\",\n )\n )\n output = json.dumps(data)\n\n response_headers = [(\"Content-type\", \"application/json\")]\n return output, status, response_headers", "def op_to_json(self, op, out_dir=r'./output_files/'):\n\n #generate export filename and export Path obj\n ts = str(datetime.now())[:-7]\n ts = ts.replace(':','').replace('-','').replace(' ','_')\n ms = self.map_size\n filename = f\"{self.name}_{ms[0]}x{ms[1]}_{ts}.json\"\n export_path = Path(out_dir).joinpath(filename)\n\n export_obj = deepcopy(SHMEPPY_JSON)\n export_obj[\"operations\"].append(op.__dict__)\n\n try:\n result_str = f\"Exporting mapfile: {str(export_path)}\\n\"\n with export_path.open(mode='w') as json_file:\n json.dump(export_obj, json_file)\n except Exception as e:\n result_str = f\"Error: {str(e)}, unable to export.\\n\"\n\n return result_str", "def save_img(self):\r\n self.extract_info_from_file()\r\n path_0 = os.path.join(self.output_path, self.field_id, self.patient_id + self.ext)\r\n path_1 = os.path.join(self.output_path, self.field_id + '_' + self.instance, self.patient_id + self.ext)\r\n if self.shot == '0': # first shot\r\n if os.path.exists(path_0) or os.path.exists(path_1):\r\n print(self.patient_id, 'already done')\r\n pass\r\n else:\r\n if not self.img_computed:\r\n self.compute_img()\r\n if self.instance == '0':\r\n self.img.save(path_0)\r\n else:\r\n self.img.save(path_1)\r\n else: # newer shot\r\n if not self.img_computed:\r\n self.compute_img()\r\n if self.instance == '0':\r\n self.img.save(path_0)\r\n else:\r\n self.img.save(path_1)", "def lice_main(base_folder, s3_client):\n\n sql_credentials = 
json.load(open(os.environ[\"SQL_CREDENTIALS\"]))\n sql_engine = create_engine(\n \"postgresql://{}:{}@{}:{}/{}\".format(sql_credentials[\"user\"], sql_credentials[\"password\"],\n sql_credentials[\"host\"], sql_credentials[\"port\"],\n sql_credentials[\"database\"]))\n\n metadata = MetaData()\n # step 1 - download crops + json\n # get the two tables we care about\n fish_crops = Table('lati_fish_detections', metadata, autoload=True, autoload_with=sql_engine)\n lice_crops = Table('lati_fish_detections_lice_annotations_reconciled', metadata, autoload=True,\n autoload_with=sql_engine)\n\n # inner join on fish crop id\n # TODO @Thomas debug this\n query = select([fish_crops.c.image_key, lice_crops.c.lice_bbox_list]) \\\n .select_from(lice_crops.join(fish_crops, lice_crops.c.lati_fish_detections_id == fish_crops.c.id)) \\\n .where(and_(fish_crops.c.site_id == 23,\n lice_crops.c.lice_bbox_list != None,\n # func.json_array_length(lice_crops.c.lice_bbox_list) > 0,\n lice_crops.c.created_by == \"[email protected]\"))\n\n json_files = []\n counter = 0\n with sql_engine.connect() as conn:\n for row in conn.execute(query):\n\t if len(row) == 0:\n\t \tcontinue\n # [image_key, lice_json]\n results = {}\n key = row[0]\n _, farm, penid, date, image_name = key.split('/')\n results[\"key\"] = key\n results[\"farm\"] = farm\n results[\"penid\"] = penid\n results[\"date\"] = date\n results[\"image_name\"] = image_name\n results[\"detections\"] = row[1]\n results[\"processed\"] = False\n destination = os.path.join(base_folder, \"crops\", farm, date, penid)\n\n results[\"image_path\"] = os.path.join(destination, image_name)\n if not os.path.isdir(destination):\n os.makedirs(destination)\n with open(os.path.join(destination, image_name.replace(\"jpg\", \"json\")), \"w\") as f:\n json.dump(results, f)\n if not os.path.isfile(os.path.join(destination, image_name)):\n s3_client.download_file(\"aquabyte-crops\", key, os.path.join(destination, image_name))\n counter += 1\n json_files.append(os.path.join(destination, image_name.replace(\"jpg\", \"json\")))\n print(\"{} new files have downloaded\".format(counter))\n\n # step 2 - create training and validation sets\n for jf in json_files:\n with open(jf, \"r\") as f:\n annotations = json.load(f)\n if annotations[\"processed\"]:\n continue\n image = io.imread(annotations[\"image_path\"])\n farm = annotations[\"farm\"]\n date = annotations[\"date\"]\n penid = annotations[\"penid\"]\n image_name = annotations[\"image_name\"]\n for (i, annotation) in enumerate(annotations['detections']):\n category = annotation['category']\n position = annotation['position']\n x1, height, y1, width = position[\"left\"], position[\"height\"], position[\"top\"], position[\"width\"]\n destination = os.path.join(base_folder, \"lice_only\", farm, date, penid, category)\n if not os.path.isdir(destination):\n os.makedirs(destination)\n lice_name = image_name + \".lice_{}.jpg\".format(i)\n io.imsave(os.path.join(destination, lice_name), image[y1:y1+height, x1:x1+width, :])\n # tag as processed\n annotations[\"processed\"] = True\n with open(jf, \"w\") as f:\n json.dump(annotations, f)", "def convert_to_json(image_file):\n img = Image.open(image_file).resize((240, 240))\n img_array = np.array(img)\n predict_request = {\"instances\": [img_array.tolist()]}\n json.dump(predict_request, codecs.open(OUTPUT_FILE, 'w', encoding='utf-8'),\n separators=(',', ':'), sort_keys=True, indent=4)\n return predict_request", "def save_step_4(imgs, output_path=\"./output/step4\"):\n # ... 
your code here ...\n cv2.imwrite(output_path+\"/output.jpg\", imgs)", "def main():\n import shutil\n import json\n\n if not os.path.isdir(args.cache):\n # creation dossier cache\n os.mkdir(args.cache)\n\n if not os.path.exists(args.cache+'/overviews.json'):\n # creation fichier overviews.json a partir d'un fichier ressource\n shutil.copy2(args.overviews, args.cache+'/overviews.json')\n\n with open(args.cache+'/overviews.json') as json_overviews:\n overviews_dict = json.load(json_overviews)\n if not (\"list_OPI\" in overviews_dict):\n overviews_dict[\"list_OPI\"] = []\n\n out_raster_srs = gdal.osr.SpatialReference()\n out_raster_srs.ImportFromEPSG(overviews_dict['crs']['code'])\n conn_string = \"PG:host=\"+host+\" dbname=\"+database+\" user=\"+user+\" password=\"+password\n db_graph = gdal.OpenEx(conn_string, gdal.OF_VECTOR)\n if db_graph is None:\n raise ValueError(\"Connection to database failed\")\n list_filename = glob.glob(args.input)\n if verbose > 0:\n print(len(list_filename), \"fichier(s) a traiter\")\n\n try:\n with open(args.cache+'/cache_mtd.json', 'r') as inputfile:\n mtd = json.load(inputfile)\n except:\n mtd = {}\n\n cliche_dejaTraites = []\n for filename in list_filename:\n cliche = Path(filename).stem\n \n if (cliche in overviews_dict['list_OPI']):\n # OPI déja traitée\n cliche_dejaTraites.append(cliche)\n else:\n print('nouvelle image: ', filename)\n color = [randrange(255), randrange(255), randrange(255)]\n while (color[0] in mtd) and (color[1] in mtd[color[0]]) and (color[2] in mtd[color[0]][color[1]]):\n color = [randrange(255), randrange(255), randrange(255)]\n if color[0] not in mtd:\n mtd[color[0]] = {}\n if color[1] not in mtd[color[0]]:\n mtd[color[0]][color[1]] = {}\n mtd[color[0]][color[1]][color[2]] = cliche\n process_image(overviews_dict, db_graph, filename, color, out_raster_srs)\n # on ajout l'OPI traitée a la liste\n overviews_dict[\"list_OPI\"].append(cliche)\n\n with open(args.cache+'/cache_mtd.json', 'w') as outfile:\n json.dump(mtd, outfile)\n\n with open(args.cache+'/overviews.json', 'w') as outfile:\n json.dump(overviews_dict, outfile)\n\n print(\"\\n\", len(list_filename) - len(cliche_dejaTraites),\"/\",len(list_filename),\"OPI(s) ajoutée(s)\")\n if len(cliche_dejaTraites) > 0:\n print(cliche_dejaTraites, \"déjà traitées : OPI non recalculée(s)\")", "def make_image_data(image_filenames):\n imgdict = make_image_data_list(image_filenames)\n return json.dumps({\"requests\": imgdict }).encode()", "def save_image(self):\n img = self.driver.find_element_by_xpath(web_map[self.region][img_path]).get_attribute(\"src\")\n img = requests.get(img, stream=True)\n self.search_results.export_image(self.region, img)", "def imagesToJSON(folder_name):\n path = os.path.join('tests', 'data', 'images', folder_name)\n file_names = [file_name for file_name in os.listdir(path) if not file_name.startswith('.')]\n frames = []\n for name in file_names:\n image = Image.open(os.path.join(path, name))\n arr = []\n width, height = image.size\n for y in xrange(height):\n row = []\n for x in xrange(width):\n row.append(1 if not image.getpixel((x,y)) else 0)\n arr.append(row)\n frames.append(arr)\n writeFrames(frames, 'actual', folder_name)", "def save_processed_images(exp_dir, img_dict):\n # save them into a directory called \"processed\"\n img_fname = os.path.join(exp_dir, str(experiment) + '_processed.jpg')", "def process_and_save(db: Broker, uid, tiff_path: str, data_key: str) -> None:\n run = db[uid]\n dk_uid = run.start.get(\"sc_dk_field_uid\", \"\")\n dk_run = db[dk_uid] if 
dk_uid else None\n dk_image = _mean(dk_run.data(data_key)) if dk_run else None\n image = _mean(run.data(data_key))\n image -= dk_image\n tw = TiffWriter(tiff_path)\n tw.write(image)\n return", "def compute_results(path, swatch, print_to):\r\n\tfile_urls = get_images(path)\r\n\tfor file_url in file_urls:\r\n\t\t# gets the fdxcolorextractor object containing color palette\r\n\t\tcolor_palette = FdxColorExtractor(file_url, swatch).extract()\r\n\r\n\t\t# gets the dictionary part of the object\r\n\t\tcolor_palette_dict = color_palette.__dict__\r\n\r\n\t\t# dumps to json asking the encoder to take dict form of every object\r\n\t\tcolor_palette_jsondump = json.dumps(color_palette_dict, default=lambda o: o.__dict__)\r\n\r\n\t\tprint(color_palette_jsondump, file=print_to)", "def add_image_path_qa_data(\n path_save=\"/home/rafi/PycharmProjects/sose21-pm-language-and-vision-g1/data/ade20k_vqa/ade20k_qa_cleaned_with_image_path.json\"):\n vqa_yes_no = get_ade20_vqa_data()\n qa_cleaned = get_ade20_qa_cleaned()\n # get Key values of the form : \"ADE_train_00005297\": \"training/c/cathedral/indoor/ADE_train_00005297.jpg\"\n vqa_path_dict = {re.search(r'.*/(.*?).jpg', line[\"image_path\"]).group(1): line[\"image_path\"] for line in vqa_yes_no}\n\n corrected_qa_paths = {}\n for k in qa_cleaned.keys():\n if k in vqa_path_dict.keys():\n path = vqa_path_dict[k]\n corrected_qa_paths[path] = qa_cleaned[k]\n\n print(f\"Saving {len(corrected_qa_paths)} corrrected paths to {path_save}\")\n with open(path_save, 'w') as outfile:\n json.dump(corrected_qa_paths, outfile)", "def imageUpload(query,callnum):\n\n source_file_name = \"data/imgsrc/{0}-{1}.png\".format(callnum,query)\n destination_blob_name = \"{0}-{1}.png\".format(callnum,query)\n storage_client = storage.Client()\n bucket = storage_client.get_bucket(os.environ.get('CLOUD_STORAGE_BUCKET'))\n blob = bucket.blob(destination_blob_name)\n blob.upload_from_filename(source_file_name)\n\n\n # Make the blob publicly viewable.\n blob.make_public()\n image_public_url = blob.public_url\n print('Image {0}: {1} uploaded to {2}.'.format(callnum,\n source_file_name,\n destination_blob_name))\n\n return destination_blob_name\n #print(json.dumps(data))\n \"\"\"\n #writing to file, this erases the file if it already existed\n fin = open('data/{0}-{1}.json'.format(callnum,query),'w+')\n fin.close()\n\n fin = open('data/{0}-{1}.json'.format(callnum,query),'w+')\n fin.write(response)\n end = time.time()\n fin.close()\n \"\"\"", "def _json_export(self, exppath):\n # TODO: Settle on JSON format for colortable\n pass", "def dump(self, filename=\".azimint.json\"):\n print \"Dump!\"\n to_save = { \"poni\": str(self.poni.text()).strip(),\n \"detector\": str(self.detector.currentText()).lower(),\n \"wavelength\":float_(self.wavelength.text()),\n \"splineFile\":str(self.splineFile.text()).strip(),\n \"pixel1\": float_(self.pixel1.text()),\n \"pixel2\":float_(self.pixel2.text()),\n \"dist\":float_(self.dist.text()),\n \"poni1\":float_(self.poni1.text()).strip(),\n \"poni2\":float_(self.poni2.text()).strip(),\n \"rot1\":float_(self.rot1.text()).strip(),\n \"rot2\":float_(self.rot2.text()).strip(),\n \"rot3\":float_(self.rot3.text()).strip(),\n \"do_dummy\": bool(self.do_dummy.isChecked()),\n \"do_mask\": bool(self.do_mask.isChecked()),\n \"do_dark\": bool(self.do_dark.isChecked()),\n \"do_flat\": bool(self.do_flat.isChecked()),\n \"do_polarization\":bool(self.do_polarization.isChecked()),\n \"val_dummy\":float_(self.val_dummy.text()).strip(),\n 
\"delta_dummy\":float_(self.delta_dummy.text()).strip(),\n \"mask_file\":str(self.mask_file.text()).strip(),\n \"dark_current\":str(self.dark_current.text()).strip(),\n \"flat_field\":str(self.flat_field.text()).strip(),\n \"polarization_factor\":float_(self.polarization_factor.value()),\n \"nbpt_rad\":int_(self.rad_pt.text()),\n \"do_2D\":bool(self.do_2D.isChecked()),\n \"nbpt_azim\":int_(self.nbpt_rad.text()),\n \"chi_discontinuity_at_0\": bool(self.chi_discontinuity_at_0.isChecked()),\n \"do_radial_range\": bool(self.do_radial_range.isChecked()),\n \"do_azimuthal_range\": bool(self.do_azimuthal_range.isChecked()),\n \"radial_range_min\":float_(self.radial_range_min.text()),\n \"radial_range_max\":float_(self.radial_range_max.text()),\n \"azimuth_range_min\":float_(self.azimuth_range_min.text()),\n \"azimuth_range_max\":float_(self.azimuth_range_max.text()),\n }\n if self.q_nm.isChecked():\n to_save[\"unit\"] = \"q_nm^-1\"\n elif self.tth_deg.isChecked():\n to_save[\"unit\"] = \"2th_deg\"\n elif self.r_mm.isChecked():\n to_save[\"unit\"] = \"r_mm\"\n with open(filename, \"w\") as myFile:\n json.dump(to_save, myFile, indent=4)\n logger.debug(\"Saved\")", "def loadjson(path, objectsofinterest, img):\n with open(path) as data_file: \n data = json.load(data_file)\n # print (path)\n pointsBelief = []\n boxes = []\n points_keypoints_3d = []\n points_keypoints_2d = []\n pointsBoxes = []\n poses = []\n centroids = []\n\n translations = []\n rotations = []\n points = []\n\n for i_line in range(len(data['objects'])):\n info = data['objects'][i_line]\n if not objectsofinterest is None and \\\n not objectsofinterest in info['class'].lower():\n continue \n \n box = info['bounding_box']\n boxToAdd = []\n\n boxToAdd.append(float(box['top_left'][0]))\n boxToAdd.append(float(box['top_left'][1]))\n boxToAdd.append(float(box[\"bottom_right\"][0]))\n boxToAdd.append(float(box['bottom_right'][1]))\n boxes.append(boxToAdd)\n\n boxpoint = [(boxToAdd[0],boxToAdd[1]),(boxToAdd[0],boxToAdd[3]),\n (boxToAdd[2],boxToAdd[1]),(boxToAdd[2],boxToAdd[3])]\n\n pointsBoxes.append(boxpoint)\n \n # 3dbbox with belief maps\n points3d = []\n \n pointdata = info['projected_cuboid']\n for p in pointdata:\n points3d.append((p[0],p[1]))\n\n # Get the centroids\n pcenter = info['projected_cuboid_centroid']\n\n points3d.append ((pcenter[0],pcenter[1]))\n pointsBelief.append(points3d)\n points.append (points3d + [(pcenter[0],pcenter[1])])\n centroids.append((pcenter[0],pcenter[1]))\n\n # load translations\n location = info['location']\n translations.append([location[0],location[1],location[2]])\n\n # quaternion\n rot = info[\"quaternion_xyzw\"]\n rotations.append(rot)\n\n return {\n \"pointsBelief\":pointsBelief, \n \"rotations\":rotations,\n \"translations\":translations,\n \"centroids\":centroids,\n \"points\":points,\n \"keypoints_2d\":points_keypoints_2d,\n \"keypoints_3d\":points_keypoints_3d,\n }", "def get_text_prediction():\n json = request.get_json()\n # print(json)\n if len(json['image']) == 0:\n return jsonify({'error': 'invalid input'})\n imgdata = base64.b64decode(json['image'])\n filename = 'some_image.png' # I assume you have a way of picking unique filenames\n with open(filename, 'wb') as f:\n f.write(imgdata)\n idx, dis= x.search(querry_image = 'some_image.png')\n print(idx, dis)\n # print(idx)\n data_path = x.find(idx)\n json_results = x.return_json(data_path)\n print(data_path)\n # print(json_results)\n # json_results = jsonify(json_results)\n return jsonify(json_results)", "def read_image_data(self):\n\n for 
sequence_name in self.sequence_name_list:\n            sequence = self.sequences[sequence_name]\n            for image_id in sequence.image_id_list:\n                sequence.image_dict[image_id].image_path = '{}{}/{}'.format(self.root_dir, self.name, sequence.image_dict[image_id].filename)", "def _dump_image(self):\n        if not self._current_id == len(self._img_ids):\n            warnings.warn(\n                'Recorded {} out of {} validation images, incomplete results'.format(\n                    self._current_id, len(self._img_ids)))\n        try:\n            for im_name, im in self._panoptic_images.items():\n                cv2.imwrite(osp.join(self._save_imgpath, im_name), im)\n        except IOError as e:\n            raise RuntimeError(\"Unable to dump images, ignored. 
What(): {}\".format(str(e)))", "def save(self):\n\n self.image.save(\"./output/\" + self.name + \" pg\" + str(self._page) + \".png\")", "def main():\n \n # for inserting other images, add tem to /input folder and list them here\n images = (\n 'image-0',\n 'image-1',\n 'image-2'\n )\n\n for image_name in images:\n print(image_name, \"image:\")\n\n image = open_image(image_name)\n display_image(image, \"Original input \" + image_name)\n\n grayscale_v = transform_colors(image)\n display_image(grayscale_v[:,:,0], \"Grayscale \" + image_name)\n save_image(image_name + \"-grayscale\", grayscale_v[:,:,0])\n\n contours_v, contours = get_contours(grayscale_v)\n display_image(contours_v, \"Contours \" + image_name)\n save_image(image_name + \"-contours\", contours_v)\n\n labeled_img, areas = get_measures(image, contours[1:])\n display_image(labeled_img, \"Labeled \" + image_name)\n save_image(image_name + \"-labeled\", labeled_img)\n\n areas_histogram(areas, image_name)", "def main(value: str) -> str:\n images = json.loads(value)\n\n prediction_results = []\n for image_url in images:\n results = predict_image_from_url(image_url)\n if results is not None:\n prediction_results.append({\n 'tag': results['predictedTagName'],\n 'url': image_url\n })\n else:\n prediction_results.append({\n 'tag': 'error',\n 'url': image_url\n })\n\n return json.dumps(prediction_results)", "def add_image_face():\n\n try:\n img = decode_image(request.files[\"image\"].read())\n except Exception as e:\n log.error(e)\n data = {\"error\": \"Error while loading image\"}\n return jsonify(data), 500\n save_picture = False\n if request.args.get(\"save\") == \"true\":\n save_picture = True\n \n face_img, _ = processor.extract_faces()\n #TODO\n #1. get several images if possible\n #2. save face_img array as picture if save_picture == True\n #3. pipe face_img array to embedder --> embedder needs to be modified to not from a folder, but from array of face_img\n #4. 
get the embedder result, insert to a pickle object --> can be section ID, or whatever", "def save_image_info(filename, class_labels):\n\n    # save prediction info locally\n    with open(IMAGE_INFO_JSON, 'r') as f:\n        image_info = json.load(f)\n    image_info[filename] = class_labels\n\n    with open(IMAGE_INFO_JSON, 'w') as f:\n        json.dump(image_info, f, indent=4)", "def outputs(self):\n        return {\"path_to_dtb_json_file\": File_IO(\n            self.node.outputs[0])}", "def img_save(self):\n        file_name, extension = return_folder_file_extension(self.img_name)[1:]\n        image_name_save = \"%s_D=%s_Rs=%s_size=%s_offset=%i%s\" % (file_name, self.D, self.Rs, self.axe_X, self.offset_X+self.offset_X2, extension)\n\n        if self.img2 is not None:\n            self.img2.save(image_name_save)\n            print(\"Saved \"+image_name_save)\n        else:\n            print(\"No image to save\")", "def deserialize_image(self, data, give_file_name):\r\n        # Generate a random 8-character name\r\n        # name = \"img_\" + self.generate_random_name() + \".png\"\r\n        name = give_file_name + \".png\"\r\n        file_path = os.path.join(self.temp_dir, name)\r\n        img = Image.frombytes(data['mode'], data['size'], data['pixels'])\r\n        img.save(file_path)\r\n        return file_path", "def process(self, datum):\n        self.image_number += 1\n        image_number = make_numbered_prefix(self.image_number, 6)\n        filename = os.path.join(self.batch_dirs[self.batch_index],\n                                image_number + self.base_filename)\n\n        cv2.imwrite(filename, datum)\n        self.batch_index += 1\n        if self.return_type == 'datum':\n            return datum\n        else:\n            return filename", "def faces_info_export(frame):\n    faces_info_dict = {}\n    faces_info_dict.setdefault('name', [])\n    # faces_info_dict.setdefault('Info', [])\n    faces_info_dict.setdefault('time_mark', [])\n    # faces_info_dict.setdefault('image_info', [])\n    faces_info_dict.setdefault('accuracy', [])\n    faces_info_dict.setdefault('face_on_cam', [])\n    # faces_info_dict.setdefault('employee_info', [])\n\n    # path_of_img = frame\n    #print(frame)\n    # frame = cv2.imread(path_of_img)\n    #cv2.imshow('parh', frame)\n    #print(frame)\n    # For faster processing, resize the frame to 1/4 size\n    #small_frame = cv2.resize(frame, (0, 0), fx=1, fy=1)\n    # Convert the color scheme of the captured image\n    #rgb_small_frame = small_frame[:, :, ::-1]\n    try:\n        rgb_frame = frame[:,:,::-1].copy()\n        recognize_faces_params = [config_gettype('recognize_faces', 'FRS.ini', param) for param in\n                                  inspect.getfullargspec(recognize_faces)[0]]\n        recognize_faces_params.remove('rgb_small_frame')\n        recognize_faces_params.insert(0, rgb_frame)\n        predictions = recognize_faces(*recognize_faces_params)\n\n        # txt_path = [config_gettype('read_txt', 'FRS.ini', 'path')]\n        # txt_path=str(txt_path)\n        for name, _, accur, rec in predictions:\n            faces_info_dict['name'].append(name)\n            faces_info_dict['time_mark'].append(datetime.now())\n            # faces_info_dict['image_info'].append(str(path_of_img))\n            faces_info_dict['accuracy'].append(float(accur))\n            faces_info_dict['face_on_cam'].append(bool(rec))\n\n            # if name != 'unknown':\n            # faces_info_dict['employee_info'].append(str(open(read_txt(txt_path)[name]).read()))\n            # else:\n            # faces_info_dict['employee_info'].append('no_info')\n\n        faces_info_df = pd.DataFrame.from_dict(faces_info_dict)\n        faces_info_df.to_csv('faces_info_csv')\n        return faces_info_df\n    except TypeError as e:\n        print('None')", "def render_and_save():\n\n  rendering_config = configuration.get_config()\n  rendering_config = ml_collections.FrozenConfigDict(rendering_config)\n  aspect_ratio = rendering_config.aspect_ratio\n  height = rendering_config.height\n  width 
= int(aspect_ratio * height)\n\n scene_camera = build_camera(rendering_config, aspect_ratio)\n world = build_world(rendering_config)\n\n # Render.\n logging.info(\"Tracing rays...\")\n render_image_fn = jax.jit(\n render.generate_image,\n static_argnames=[\"height\", \"width\", \"config\"])\n image = render_image_fn(height, width, scene_camera, world, rendering_config)\n\n image = render.correct_gamma(image, gamma=rendering_config.gamma_correction)\n\n logging.info(\"Saving to file...\")\n output.export_as_ppm(image, rendering_config.output_file)\n\n return image", "def iiif_info_json(images):\n return json.dumps([image[\"image\"].info() for image in images])", "def main(filename, iterations, save_diagnostics, output_dir, burnin):\n #data = []\n #with open(filename,'rb') as json_data:\n #skip header\n #jsondata = json.load(json_data)\n #j=0\n #while j<271:\n #eruption_time = jsondata[j]['FIELD1']\n #waiting_time = jsondata[j]['FIELD2']\n #data.append([float(eruption_time), float(waiting_time)])\n #j=j+1\n\n #generate ida images\n data = np.array([[131,3,1],[49,1,1],[17,7,1],[55,7,19],[80,5,1],[40,2,2],[91,21,6],[19,16,1],[27,7,1],[15,50,2],[37,1,7],[17,3,1],[22,32,2],[68,2,1],[26,2,3],[15,2,3],[246,2,1],[25,2,1],[19,1,1],[98,1,2],[54,13,1],[168,2,4],[20,102,5],[40,2,1],[41,1,1],[44,19,16],[17,6,1],[92,12,1],[17,2,1],[16,5,3],[45,11,1],[20,10,1],[26,1,2],[21,9,9],[26,10,1],[187,4,2],[65,28,4],[17,9,33],[23,39,1],[58,4,4],[41,107,3],[28,3,1],[16,1,1],[17,16,4],[17,16,1],[17,5,1],[83,2,2],[17,1,2],[26,4,2],[22,7,2],[16,1,1],[15,2,1],[15,2,1],[111,8,1],[25,6,1],[112,4,1],[19,10,2],[38,25,4],[29,1,5],[17,2,1],[111,9,8],[53,5,4],[29,7,1],[25,8,2],[23,2,134],[32,6,1],[27,1,1],[61,4,2],[41,163,4],[57,11,2],[24,2,1],[16,18,1],[81,7,14],[169,5,1],[19,4,1],[412,5,1],[32,2,7],[19,28,3],[17,11,1],[44,4,5],[27,2,2],[18,1,7],[15,3,3],[18,10,1],[19,6,10],[46,2,5],[20,12,3],[25,6,4],[18,4,1],[15,40,8],[16,11,16],[237,1,1],[26,13,2],[26,4,1],[101,5,5],[50,2,1],[22,45,5],[16,7,2],[17,4,2],[19,2,3],[22,1,1],[260,6,1],[20,15,1],[24,5,1],[33,2,1],[16,1,5],[21,18,1],[22,1,1],[18,13,2],[124,3,1],[16,6,1],[19,6,2],[71,2,1],[232,2,2],[21,2,1],[231,11,1],[201,49,2],[28,12,1],[68,5,1],[56,26,7],[17,1,8],[19,10,2],[120,13,2],[218,3,1],[46,5,6],[57,4,1],[30,5,2],[17,8,4],[17,22,1],[15,5,1],[16,7,1],[26,13,1],[28,22,2],[100,1,2],[58,12,2],[52,9,11],[21,4,2],[18,4,1],[699,1,1],[401,6,3],[20,7,1],[20,3,13],[27,1,1],[35,2,2],[27,6,1],[15,13,1],[17,6,1],[26,28,4],[89,2,3],[36,11,2],[17,11,2],[15,1,1],[59,3,1],[15,3,1],[20,11,1],[49,1,1],[24,3,1],[25,7,1],[29,1,1],[61,2,2],[28,3,13],[82,2,8],[22,2,1],[21,25,3],[73,3,2],[22,8,1],[51,3,12],[16,6,1],[64,2,4],[22,2,2],[19,7,1],[69,2,1],[17,8,9],[19,1,13],[28,35,3],[134,2,1],[19,12,1],[27,13,1],[17,10,1],[16,17,4],[46,2,3],[15,1,2],[35,15,2],[20,6,1],[16,10,3],[33,11,1],[20,8,4],[15,5,1],[33,5,2],[460,6,1],[132,2,1],[73,14,3],[34,5,1],[123,1,2],[15,8,1],[30,1,1],[16,1,1],[73,3,1],[54,4,1],[17,1,9],[17,17,3],[22,1,3],[46,16,8],[18,1,1],[22,3,2],[21,4,1],[40,5,1],[19,2,1],[16,11,1],[19,4,1],[26,4,1],[87,1,3],[75,1,8],[25,1,1],[16,1,1],[17,10,3],[15,44,2],[79,3,1],[21,19,1],[292,5,13],[27,4,1],[25,2,1],[23,34,1],[36,2,1],[15,2,7],[18,3,3],[62,1,7],[16,61,5],[15,5,1],[36,5,1],[67,8,3],[18,4,1],[23,2,1],[16,21,3],[32,7,1],[22,6,1],[88,5,1],[19,2,4],[38,2,1],[47,6,28],[18,35,3],[159,15,1],[25,3,5],[295,9,4],[26,2,1],[27,8,3],[86,6,1],[24,25,4],[18,1,2],[16,6,1],[64,16,1],[39,1,2],[30,1,4],[44,1,3],[82,11,4],[28,13,2],[46,19,1],[15,26,1],[30,6,11],[51,3,6],[19,20,1],[940,6,4],[21,6,1],[2
9,2,1],[20,2,1],[31,2,1],[21,2,3],[25,27,1],[26,2,1],[17,4,1],[64,7,1],[126,7,15],[18,8,1],[20,13,2],[16,7,2],[18,2,1],[19,4,5],[29,1,1],[80,12,2],[42,14,6],[107,2,1],[15,4,1],[48,16,1],[62,3,2],[15,13,1],[29,48,7],[25,4,1],[17,5,20],[19,7,3],[22,10,3],[58,15,3],[17,14,1],[121,2,2],[33,64,11],[16,15,2],[39,6,2],[25,69,7],[69,2,1],[41,6,2],[20,5,1],[42,22,4],[18,17,4],[16,14,3],[27,14,1],[20,1,1],[44,1,101],[33,9,1],[26,2,8],[30,24,3],[27,24,2],[34,7,1],[39,6,3],[20,2,3],[55,5,1],[22,22,2],[17,2,1],[55,3,1],[29,10,5],[60,12,2],[18,13,3],[93,3,2],[15,3,1],[26,5,5],[18,1,1],[17,16,2],[15,13,3],[22,12,1],[256,19,27],[18,7,8],[22,3,1],[35,3,4],[16,2,1],[19,6,2],[24,1,1],[29,3,2],[36,21,8],[24,1,1],[18,6,2],[26,24,11],[19,15,2],[16,1,1],[28,4,1],[60,11,1],[62,4,2],[70,2,1],[75,1,2],[125,3,1],[21,6,1],[165,23,2],[108,1,1],[35,5,1],[251,19,12],[137,4,1],[81,11,4],[104,19,4],[18,18,3],[19,13,1],[18,112,5],[19,6,2],[28,7,2],[23,9,1],[20,15,7],[34,1,1],[24,12,3],[15,5,1],[40,9,4],[24,41,6],[35,1,1],[17,3,1],[17,3,4],[46,7,2],[21,8,10],[17,7,4],[36,6,1],[32,6,2],[31,1,1],[17,32,5],[26,3,4],[16,4,1],[21,2,1],[19,4,1],[33,4,1],[46,7,1],[28,9,1],[169,9,24],[24,18,2],[103,6,1],[93,1,1],[156,2,1],[58,7,1],[55,30,3],[15,5,1],[20,9,1],[19,20,1],[44,1,3],[16,2,1],[23,4,1],[22,10,1],[16,138,5],[17,2,1],[17,1,2],[70,8,5],[15,3,6],[22,6,1],[20,1,1],[35,2,4],[15,3,1],[26,119,46],[390,18,2],[22,4,1],[175,5,2],[23,4,1],[26,2,21],[17,1,2],[112,4,1],[18,22,5],[22,2,1],[122,13,1],[18,1,1],[27,7,1],[26,18,5],[18,1,3],[28,1,15],[35,11,1],[15,2,1],[55,6,5],[67,3,1],[30,5,7],[31,12,1],[16,9,12],[43,7,1],[23,21,1],[43,2,7],[53,40,1],[58,6,1],[29,27,11],[65,6,2],[27,4,2],[15,7,2],[17,26,13],[48,4,79],[30,2,6],[25,1,1],[20,20,6],[59,2,5],[15,14,4],[18,7,1],[18,2,1],[28,7,1],[35,1,1],[15,12,4],[52,2,2],[16,25,1],[91,1,1],[27,7,3],[62,4,1],[29,11,1],[25,4,3],[15,1,1],[40,6,2],[19,2,2],[24,14,2],[33,5,1],[58,3,3],[23,1,4],[15,2,2],[92,5,1],[17,2,1],[16,10,1],[50,8,1],[24,2,1],[73,1,1],[30,33,55],[18,15,1],[15,9,4],[23,1,3],[17,5,1],[43,3,1],[15,9,2],[19,4,2],[20,20,4],[31,1,2],[21,3,1],[79,9,13],[20,3,24],[56,2,1],[26,1,2],[15,3,1],[30,12,1],[64,6,1],[327,8,47],[39,2,1],[22,17,5],[18,6,3],[74,14,2],[17,4,1],[39,1,3],[520,9,3],[65,9,1],[36,1,4],[264,3,3],[16,1,1],[18,5,3],[22,16,3],[21,2,1],[15,3,3],[49,5,1],[37,19,2],[19,13,2],[30,1,1],[44,4,1],[19,9,31],[22,4,2],[21,4,5],[16,4,1],[40,17,1],[15,12,4],[43,4,3],[21,30,1],[60,16,3],[28,2,1],[38,16,2],[19,3,1],[68,18,4],[1,4,3],[1,9,1],[1,2,2],[1,1,4],[1,148,4],[1,6,1],[1,16,1],[1,4,1],[1,19,3],[1,7,3],[1,2,2],[1,4,2],[1,47,5],[1,2,2],[1,1,4],[1,1,2],[1,1,2],[1,1,1],[1,4,2],[1,7,1],[1,4,6],[1,2,1],[1,5,4],[1,9,3],[1,9,2],[1,7,1],[1,4,1],[1,10,2],[1,1,1],[1,5,1],[1,5,1],[1,2,16],[1,2,1],[1,1,1],[1,3,2],[1,8,3],[1,1,18],[1,5,1],[1,14,3],[1,6,6],[1,7,1],[1,1,1],[1,16,1],[1,2,1],[1,2,1],[1,1,2],[1,4,4],[1,4,1],[1,9,1],[1,25,7],[1,1,1],[1,8,2],[1,1,4],[1,77,8],[1,1,3],[1,6,3],[1,4,2],[1,2,2],[1,2,1],[1,40,1],[1,26,3],[1,1,4],[1,1,1],[1,2,2],[1,1,2],[1,15,1],[1,35,86],[1,3,2],[1,4,1],[1,2,1],[1,4,3],[1,30,1],[1,2,1],[1,4,2],[1,2,1],[1,1,1],[1,2,1],[1,3,1],[1,2,3],[1,3,1],[1,14,1],[1,3,2],[1,7,4],[1,6,2],[1,2,1],[1,23,2],[1,4,1],[1,4,3],[1,26,3],[1,47,15],[1,3,5],[1,5,1],[1,3,1],[1,2,1],[1,2,1],[1,3,1],[1,36,1],[1,2,1],[1,1,9],[1,6,1],[1,2,1],[1,8,3],[1,7,1],[1,33,2],[1,14,4],[1,13,3],[1,2,1],[1,5,1],[1,7,2],[1,9,3],[1,6,1],[1,3,1],[1,9,1],[1,2,2],[1,2,1],[1,6,3],[1,4,2],[1,2,1],[1,1,1],[1,13,4],[1,9,2],[1,4,2],[1,7,14],[1,8,1],[1,3,1],[1,25,2],[1,2,1],[1,11,1],[1,2,1],[1,1,1],[1,3,3],[1,3
,2],[1,2,1],[1,2,1],[1,2,8],[1,9,1],[1,13,9],[1,3,1],[1,8,1],[1,102,71],[1,22,1],[1,2,3],[1,22,2],[1,1,1],[1,3,1],[1,12,1],[1,3,2],[1,1,1],[1,5,2],[1,30,6],[1,14,1],[1,2,1],[1,1,1],[1,5,1],[1,8,1],[1,4,2],[1,3,1],[1,2,1],[1,1,1],[1,1,1],[1,12,1],[1,14,1],[1,10,2],[1,22,3],[1,15,2],[1,4,2],[1,5,1],[1,10,2],[1,10,26],[1,1,2],[1,1,2],[1,17,1],[1,1,1],[1,7,1],[1,1,1],[1,8,2],[1,5,2],[1,15,1],[1,16,2],[1,7,1],[1,26,1],[1,16,2],[1,13,6],[1,3,3],[1,2,1],[1,2,1],[1,5,3],[1,1,1],[1,4,1],[1,1,1],[1,2,2],[1,13,4],[1,50,2],[1,12,3],[1,2,1],[1,16,5],[1,2,8],[1,3,5],[1,1,1],[1,25,1],[1,5,1],[1,13,2],[1,1,2],[1,8,1],[1,13,1],[1,4,4],[1,2,3],[1,7,2],[1,2,4],[1,2,1],[1,1,2],[1,4,1],[1,3,2],[1,8,4],[1,4,1],[1,2,2],[1,2,1],[1,3,1],[1,7,1],[1,8,5],[1,34,4],[1,2,3],[1,1,1],[1,8,3],[1,3,1],[1,26,2],[1,3,1],[1,1,6],[1,2,4],[1,7,1],[1,9,2],[1,3,93],[1,2,1],[1,3,2],[1,3,3],[1,15,3],[1,12,1],[1,1,1],[1,1,5],[1,4,1],[1,1,4],[1,2,1],[1,6,4],[1,9,1],[1,1,9],[1,11,1],[1,68,2],[1,7,1],[1,11,1],[1,6,1],[1,5,2],[1,2,1],[1,19,1],[1,3,1],[1,1,2],[1,37,1],[1,19,1],[1,4,5],[1,8,1],[1,1,1],[1,7,1],[1,3,1],[1,4,1],[1,6,7],[1,2,1],[1,14,3],[1,4,1],[1,6,5],[1,1,1],[1,1,1],[1,2,1],[1,1,2],[1,7,2],[1,8,1],[1,17,136],[1,6,1],[1,3,2],[1,9,12],[1,7,2],[1,2,9],[1,1,4],[1,3,1],[1,10,1],[1,6,16],[1,8,1],[1,2,2],[1,2,2],[1,4,3],[1,3,3],[1,24,3],[1,68,28],[1,16,1],[1,9,2],[1,1,2],[1,18,7],[1,3,1],[1,5,2],[1,1,3],[1,3,1],[1,3,8],[1,73,5],[1,6,3],[1,5,1],[1,2,1],[1,15,7],[1,80,2],[1,3,1],[1,12,3],[1,8,1],[1,2,1],[1,9,5],[1,3,2],[1,319,20],[1,2,1],[1,4,6],[1,5,4],[1,25,1],[1,8,1],[1,6,5],[1,18,1],[1,2,2],[1,5,2],[1,10,1],[1,10,1],[1,2,1],[1,6,2],[1,7,2],[1,39,1],[1,7,79],[1,28,4],[1,2,1],[1,4,1],[1,25,5],[1,23,3],[1,10,3],[1,2,1],[1,13,1],[1,2,2],[1,6,1],[1,6,4],[1,12,1],[1,4,1],[1,3,1],[1,10,1],[1,4,2],[1,7,1],[1,11,1],[1,6,1],[1,4,2],[1,3,3],[1,1,1],[1,1,1],[1,3,3],[1,3,2],[1,15,1],[1,1,1],[1,1,4],[1,26,2],[1,1,1],[1,7,1],[1,4,63],[1,1,19],[1,96,7],[1,7,2],[1,6,1],[1,4,1],[1,18,2],[1,1,2],[1,4,1],[1,3,3],[1,18,1],[1,3,1],[1,14,1],[1,6,2],[1,13,1],[1,1,5],[1,13,2],[1,1,1],[1,4,4],[1,10,1],[1,2,1],[1,12,3],[1,7,1],[1,8,1],[1,3,1],[1,2,2],[1,4,5],[1,9,1],[1,2,1],[1,2,1],[1,6,8],[1,32,3],[1,3,2],[1,6,1],[1,5,1],[1,7,1],[1,4,2],[1,2,1],[1,5,4],[1,1,2],[1,9,1],[1,2,1],[1,11,1],[1,5,2],[1,2,1],[1,1,1],[1,3,1],[1,7,13],[1,4,4],[1,1,1],[1,6,1],[1,1,3],[1,6,6],[1,6,1],[1,4,4],[1,10,1],[1,15,1],[1,3,7],[1,6,1],[1,9,1],[1,14,23],[1,14,2],[1,6,3],[1,2,1],[1,9,1],[1,1,3],[1,6,4],[1,15,2],[1,8,1],[1,6,6],[1,16,10],[1,5,4],[1,30,3],[1,7,1],[1,4,1],[1,3,1],[1,6,6],[1,1,2],[1,3,2],[1,1,1],[1,1,1],[1,1,1],[1,2,5],[1,2,1],[1,2,5],[1,24,1],[1,3,1],[1,6,1],[1,2,1],[1,4,1],[1,2,2],[1,4,1],[1,1,1],[1,3,1],[1,8,2],[1,4,2],[1,2,2],[1,2,1],[1,12,6],[1,2,1],[1,32,42],[1,7,1],[1,7,1],[1,12,1],[1,2,1],[1,6,1],[1,42,1],[1,2,1],[1,1,2],[1,2,1],[1,6,1],[1,2,2],[1,8,1],[1,22,4],[1,1,1],[1,11,20],[1,6,2],[1,2,1],[1,4,2],[1,9,1],[1,10,1],[1,16,5],[1,3,2],[1,8,1],[1,6,3],[1,1,2],[1,6,1],[1,2,1],[1,28,1],[1,18,1],[1,17,8],[1,4,1],[1,2,2],[1,13,1],[1,25,3],[1,7,4],[1,3,1],[1,1,1],[1,3,3],[1,4,1],[1,7,5],[1,2,2],[1,5,1],[1,2,2],[1,2,2],[1,14,1],[1,3,3],[1,4,1],[1,1,2],[1,11,1],[1,2,1],[1,6,1],[1,7,6],[1,7,1],[1,2,2],[1,2,1],[1,31,4],[1,4,3],[1,14,6],[1,4,4],[1,1,1],[1,2,1],[1,12,5],[1,4,1],[1,7,1],[1,3,1],[1,4,1],[1,11,1],[1,12,1],[1,3,2],[1,9,1],[1,17,2],[1,9,5],[1,6,1],[1,13,2],[1,5,1],[1,4,3],[1,3,1],[1,1,4],[1,7,1],[1,4,1],[1,3,1],[1,56,3],[1,1,1],[1,9,1],[1,4,1],[1,15,1],[1,2,1],[1,12,1],[1,4,2],[1,1,1],[1,1,1],[1,149,2],[1,56,1],[1,4,5],[1,2,2],[1,11,3],[1,2,3],[1,1,2],[1,2,
1],[1,15,4],[1,2,2],[1,4,1],[1,17,2],[1,10,5],[1,14,2],[1,8,2],[1,4,2],[1,4,1],[1,6,1],[1,5,1],[1,7,2],[1,20,5],[1,3,1],[1,4,1],[1,11,1],[1,2,1],[1,1,3],[1,5,2],[1,6,1],[1,4,3],[1,4,3],[1,4,2],[1,7,3],[1,5,1],[1,1,1],[1,2,1],[1,8,1],[1,7,1],[1,2,1],[1,1,1],[1,1,1],[1,4,3],[1,11,1],[1,43,1],[1,7,8],[1,8,1],[1,1,1],[1,8,6],[1,9,3],[1,19,1],[1,2,1],[1,43,3],[1,4,5],[1,2,3],[1,4,1],[1,17,1],[1,9,1],[1,8,72],[1,2,1],[1,4,2],[1,16,1],[1,15,1],[1,8,1],[1,3,1],[1,7,8],[1,4,1],[1,23,2],[1,1,2],[1,1,1],[1,15,7],[1,7,4],[1,3,4],[1,5,1],[1,1,1],[1,6,83],[1,1,1],[1,4,3],[1,2,1],[1,3,2],[1,9,2],[1,5,1],[1,22,1],[1,3,6],[1,6,4],[1,4,1],[1,1,4],[1,1,1],[1,5,3],[1,1,2],[1,15,2],[1,8,1],[1,5,2],[1,1,1],[1,4,10],[1,63,1],[1,2,2],[1,2,1],[1,9,1],[1,4,3],[1,2,1],[1,24,1],[1,2,2],[1,2,2],[1,6,2],[1,13,5],[1,34,5],[1,10,1],[1,3,1],[1,22,9],[1,41,1],[1,1,4],[1,13,2],[1,18,1],[1,4,4],[1,7,1],[1,4,3],[1,14,4],[1,3,2],[1,2,1],[1,7,10],[1,15,3],[1,6,1],[1,1,1],[1,2,5],[1,4,10],[1,5,2],[1,12,6],[1,6,1],[1,19,134],[1,11,1],[1,233,9],[1,4,2],[1,40,1],[1,2,1],[1,10,1],[1,3,1],[1,3,1],[1,3,1],[1,35,1],[1,2,7],[1,1,3],[1,3,1],[1,14,2],[1,1,1],[1,7,1],[1,6,5],[1,10,1],[1,5,3],[1,8,1],[1,11,1],[1,13,1],[1,8,9],[1,5,1],[1,3,1],[1,11,1],[1,2,1],[1,5,1],[1,7,1],[1,9,3],[1,2,3],[1,2,2],[1,29,2],[1,2,1],[1,4,3],[1,1,2],[1,2,2],[1,3,6],[1,11,1],[1,1,1],[1,11,1],[1,4,1],[1,6,1],[1,3,5],[1,4,1],[1,4,3],[1,34,1],[1,4,2],[1,1,9],[1,18,1],[1,9,3],[1,15,1],[1,4,4],[1,4,2],[1,9,1],[1,4,1],[1,10,1],[1,2,1],[1,2,4],[1,4,1],[1,1,2],[1,3,3],[1,2,1],[1,47,14],[1,3,1],[1,2,1],[1,3,1],[1,1,1],[1,20,1],[1,14,6],[1,2,2],[1,16,2],[1,2,1],[1,1,31],[1,5,9],[1,10,2],[1,10,3],[1,19,1],[1,1,1],[1,13,2],[1,5,1],[1,1,2],[1,1,2],[1,24,1],[1,9,2],[1,4,1],[1,10,3],[1,35,6],[1,1,1],[1,2,1],[1,1,1],[1,3,1],[1,4,5],[1,4,1],[1,1,1],[1,4,1],[1,10,2],[1,55,6],[1,3,22],[1,28,4],[1,6,3],[1,10,1],[1,6,187],[1,3,2],[1,12,5],[1,7,1],[1,4,1],[1,2,2],[1,2,1],[1,31,9],[1,2,8],[1,20,2],[1,36,2],[1,2,2],[1,15,5],[1,5,2],[1,3,2],[1,8,1],[1,1,1],[1,2,1],[1,37,1],[1,17,4],[1,8,1],[1,19,2],[1,7,1],[1,1,1],[1,1,1],[1,2,1],[1,9,1],[1,2,1],[1,2,1],[1,2,1],[1,19,1],[1,33,3],[1,4,1],[1,7,1],[1,3,1],[1,46,4],[1,2,1],[1,3,2],[1,1,2],[1,2,2],[1,14,1],[1,3,1],[1,11,2],[1,2,2],[1,21,2],[1,34,2],[1,4,1],[1,1,1],[1,2,1],[1,22,1],[1,64,9],[1,21,10],[1,3,3],[1,6,1],[1,16,2],[1,3,1],[1,31,4],[1,1,1],[1,1,2],[1,1,1],[1,3,1],[1,5,4],[1,27,1],[1,1,1],[1,2,2],[1,17,10],[1,4,1],[1,25,1],[1,41,1],[1,18,4],[1,17,40],[1,9,1],[1,2,1],[1,7,1],[1,21,2],[1,2,3],[1,3,1],[1,14,1],[1,8,2],[1,2,1],[1,2,2],[1,5,1],[1,1,2],[1,4,1],[1,6,5],[1,9,17],[1,5,1],[1,6,1],[1,4,1],[1,1,1],[1,3,1],[1,61,9],[1,6,1],[1,9,2],[1,2,2],[1,9,1],[1,7,4],[1,12,1],[1,2,2],[1,40,1],[1,17,13],[1,1,7],[1,11,2],[1,20,2],[1,2,1],[1,1,1],[1,12,10],[1,5,3],[1,2,1],[1,1,1],[1,23,2],[1,9,3],[1,4,1],[1,5,2],[1,4,1],[1,19,5],[1,5,1],[1,1,4],[1,5,1],[1,8,1],[1,9,1],[1,5,3],[1,43,3],[1,1,2],[1,3,1],[1,2,2],[1,15,38],[1,3,1],[1,25,1],[1,1,4],[1,5,6],[1,2,1],[1,4,3],[1,4,2],[1,3,1],[1,9,1],[1,4,1],[1,13,2],[1,7,4],[1,2,6],[1,12,1],[1,8,3],[1,1,4],[1,13,1],[1,3,4],[1,3,2],[1,2,2],[1,4,1],[1,6,1],[1,14,3],[1,7,1],[1,8,1],[1,8,1],[1,3,1],[1,32,5],[1,16,2],[1,2,3],[1,38,1],[1,5,4],[1,10,2],[1,2,7],[1,3,1],[1,8,1],[1,3,2],[1,1,3],[1,4,2],[1,71,12],[1,8,4],[1,2,12],[1,3,1],[1,12,2],[1,2,1],[1,5,1],[1,2,28],[1,19,5],[1,10,1],[1,9,2],[1,3,1],[1,7,6],[1,11,1],[1,2,1],[1,27,2],[1,7,4],[1,4,2],[1,12,8],[1,8,96],[1,12,1],[1,2,4],[1,7,5],[1,15,3],[1,3,2],[1,18,2],[1,25,3],[1,7,2],[1,18,2],[1,6,1],[1,10,2],[1,4,1],[1,1,3],[1,5,1],[1,19,2],[1,8,1],[1,50,4],[1,
8,1],[1,11,1],[1,9,1],[1,2,1],[1,2,5],[1,3,1],[1,6,2],[1,1,1],[1,13,5],[1,19,1],[1,7,2],[1,17,1],[1,6,1],[1,4,1],[1,7,3],[1,13,3],[1,7,4],[1,5,2],[1,4,1],[1,11,16],[1,7,1],[1,1,1],[1,2,1],[1,2,1],[1,14,3],[1,30,1],[1,2,6],[1,6,2],[1,3,1],[1,4,1],[1,9,11],[1,6,1],[1,35,1],[1,2,8],[1,1,2],[1,3,2],[1,1,1],[1,9,1],[1,2,57],[1,2,1],[1,5,1],[1,4,2],[1,15,1],[1,12,3],[1,4,3],[1,17,1],[1,12,2],[1,21,12],[1,2,1],[1,9,1],[1,9,47],[1,49,4],[1,5,1],[1,4,1],[1,24,1],[1,2,2],[1,64,2],[1,48,7],[1,2,2],[1,10,2],[1,3,1],[1,11,1],[1,5,1],[1,1,2],[1,2,4],[1,6,1],[1,19,6],[1,6,2],[1,3,2],[1,1,1],[1,22,2],[1,3,2],[1,5,14],[1,2,1],[1,11,1],[1,4,2],[1,6,1],[1,24,10],[1,7,1],[1,2,74],[1,6,1],[1,28,1],[1,1,1],[1,1,1],[1,10,1],[1,88,4],[1,9,4],[1,26,1],[1,3,1],[1,4,1],[1,4,1],[1,6,1],[1,23,1],[1,2,7],[1,1,3],[1,7,1],[1,1,1],[1,5,2],[1,4,1],[1,2,1],[1,1,1],[1,15,5],[1,22,1],[1,6,3],[1,12,2],[1,48,14],[1,7,1],[1,5,1],[1,10,5],[1,5,1],[1,6,5],[1,2,3],[1,14,3],[1,3,1],[1,8,4],[1,2,5],[1,34,3],[1,2,1],[1,4,1],[1,6,7],[1,3,1],[1,3,3],[1,32,2],[1,3,1],[1,3,1],[1,2,1],[1,3,1],[1,39,8],[1,1,1],[1,15,8],[1,3,4],[1,2,3],[1,1,3],[1,38,18],[1,6,1],[1,25,4],[1,2,1],[1,8,1],[1,3,1],[1,24,1],[1,5,5],[1,5,4],[1,2,3],[1,2,1],[1,5,4],[1,51,1],[1,23,3],[1,2,1],[1,2,1],[1,1,2],[1,7,2],[1,3,1],[1,1,1],[1,4,1],[1,2,1],[1,7,6],[1,8,1],[1,11,1],[1,2,6],[1,2,1],[1,2,1],[1,1,1],[1,26,1],[1,3,1],[1,2,1],[1,2,1],[1,2,1],[1,12,2],[1,1,3],[1,3,1],[1,2,4],[1,19,3],[1,3,1],[1,3,2],[1,49,3],[1,2,1],[1,21,3],[1,1,1],[1,5,1],[1,4,1],[1,2,2],[1,2,1],[1,1,1],[1,7,4],[1,2,1],[1,2,1],[1,2,1],[1,3,2],[1,26,2],[1,9,1],[1,2,2],[1,12,1],[1,4,32],[1,4,1],[1,17,1],[1,1,2],[1,77,4],[1,2,1],[1,12,1],[1,2,1],[1,2,4],[1,5,2],[1,10,3],[1,4,3],[1,2,1],[1,1,3],[1,16,4],[1,3,1],[1,40,2],[1,13,1],[1,2,1],[1,6,2],[1,12,2],[1,6,11],[1,6,1],[1,1,1],[1,10,6],[1,1,1],[1,6,5],[1,38,4],[1,2,7],[1,9,1],[1,5,2],[1,3,1],[1,2,1],[1,5,2],[1,4,1],[1,1,1],[1,1,1],[1,4,2],[1,4,3],[1,5,2],[1,1,4],[1,11,4],[1,14,4],[1,4,1],[1,17,2],[1,2,2],[1,39,1],[1,9,21],[1,14,2],[1,4,4],[1,4,3],[1,9,2],[1,1,1],[1,3,2],[1,1,1],[1,1,7],[1,16,4],[1,5,1],[1,2,1],[1,2,1],[1,2,1],[1,98,19],[1,4,1],[1,1,1],[1,5,1],[1,7,1],[1,1,3],[1,9,1],[1,4,2],[1,2,1],[1,7,2],[1,2,1],[1,1,2],[1,1,1],[1,5,2],[1,6,1],[1,11,6],[1,5,4],[1,40,5],[1,1,2],[1,9,1],[1,2,1],[1,6,1],[1,5,1],[1,11,2],[1,4,1],[1,3,17],[1,1,1],[1,1,5],[1,9,5],[1,60,1],[1,3,7],[1,3,4],[1,5,1],[1,3,10],[1,5,2],[1,7,1],[1,2,1],[1,14,14],[1,4,3],[1,1,2],[1,2,4],[1,5,1],[1,11,7],[1,3,1],[1,29,3],[1,2,4],[1,8,1],[1,53,1],[1,10,1],[1,7,2],[1,2,13],[1,58,1],[1,5,6],[1,2,1],[1,4,2],[1,4,2],[1,4,2],[1,5,2],[1,2,3],[1,12,2],[1,4,6],[1,34,1],[1,1,1],[1,8,1],[1,4,1],[1,2,1],[1,2,2],[1,16,1],[1,4,2],[1,3,13],[1,2,2],[1,46,2],[1,4,1],[1,6,1],[1,1,2],[1,2,1],[1,3,6],[1,3,1],[1,19,1],[1,2,1],[1,23,1],[1,3,1],[1,1,1],[1,7,2],[1,4,4],[1,18,3],[1,1,1],[1,7,2],[1,2,2],[1,7,1],[1,2,1],[1,2,1],[1,6,1],[1,9,4],[1,3,1],[1,5,1],[1,13,1],[1,2,2],[1,33,1],[1,12,1],[1,9,3],[1,2,1],[1,1,1],[1,18,1],[1,1,3],[1,3,15],[1,2,4],[1,17,1],[1,1,1],[1,1,1],[1,4,8],[1,1,2],[1,31,19],[1,1,5],[1,7,6],[1,12,4],[1,2,4],[1,7,8],[1,4,2],[1,13,2],[1,19,18],[1,42,4],[1,3,1],[1,17,1],[1,3,3],[1,4,2],[1,12,1],[1,1,6],[1,23,2],[1,3,1],[1,20,1],[1,21,4],[1,1,1],[1,3,2],[1,10,1],[1,9,1],[1,8,6],[1,21,3],[1,5,1],[1,7,6],[1,2,1],[1,5,1],[1,1,2],[1,11,1],[1,8,212],[1,9,3],[1,6,1],[1,1,2],[1,25,12],[1,4,1],[1,14,15],[1,4,1],[1,13,1],[1,2,2],[1,3,1],[1,4,1],[1,3,1],[1,1,1],[1,3,1],[1,9,7],[1,1,1],[1,6,1],[1,8,2],[1,8,1],[1,2,3],[1,3,1],[1,2,3],[1,1,2],[1,10,1],[1,6,1],[1,12,3],[1,12,1],[1,1,1],[1,2,1],[1,2,4
],[1,4,1],[1,2,1],[1,1,1],[1,4,1],[1,23,2],[1,4,2],[1,20,1],[1,17,4],[1,8,2],[1,4,6],[1,4,1],[1,6,1],[1,10,1],[1,6,2],[1,1,1],[1,3,1],[1,4,1],[1,4,1],[1,16,143],[1,7,1],[1,10,1],[1,7,2],[1,3,3],[1,8,3],[1,2,1],[1,49,1],[1,2,7],[1,14,4],[1,31,3],[1,29,1],[1,31,8],[1,5,2],[1,7,1],[1,1,1],[1,4,5],[1,1,1],[1,7,3],[1,1,2],[1,5,3],[1,3,1],[1,7,4],[1,129,9],[1,13,1],[1,11,4],[1,6,28],[1,6,1],[1,6,1],[1,20,1],[1,2,1],[1,16,3],[1,3,3],[1,5,1],[1,64,1],[1,4,2],[1,7,1],[1,21,3],[1,2,2],[1,9,1],[1,2,1],[1,5,6],[1,6,6],[1,3,1],[1,5,1],[1,3,1],[1,3,1],[1,6,2],[1,2,3],[1,4,1],[1,1,1],[1,12,37],[1,6,1],[1,1,1],[1,4,2],[1,4,8],[1,6,2],[1,2,2],[1,19,1],[1,1,1],[1,1,3],[1,3,1],[1,4,5],[1,15,2],[1,8,3],[1,1,1],[1,2,2],[1,3,1],[1,10,1],[1,4,1],[1,1,2],[1,19,1],[1,5,2],[1,4,4],[1,3,2],[1,3,17],[1,1,1],[1,1,1],[1,2,1],[1,18,3],[1,3,1],[1,16,4],[1,5,1],[1,11,2],[1,19,8],[1,2,1],[1,2,1],[1,1,6],[1,3,1],[1,2,1],[1,1,1],[1,2,1],[1,11,3],[1,17,4],[1,4,1],[1,4,4],[1,5,2],[1,1,1],[1,1,2],[1,10,12],[1,2,2],[1,8,1],[1,1,2],[1,8,1],[1,17,2],[1,2,1],[1,4,1],[1,6,1],[1,20,21],[1,5,7],[1,3,1],[1,13,2],[1,3,6],[1,8,3],[1,12,1],[1,12,2],[1,3,2],[1,15,2],[1,6,1],[1,9,5],[1,5,3],[1,4,1],[1,7,4],[1,4,4],[1,9,4],[1,11,1],[1,3,1],[1,17,1],[1,71,5],[1,7,1],[1,3,1],[1,5,1],[1,1,1],[1,1,2],[1,2,1],[1,1,2],[1,10,2],[1,3,1],[1,2,2],[1,5,1],[1,28,4],[1,2,1],[1,1,1],[1,9,1],[1,3,2],[1,8,2],[1,13,1],[1,2,1],[1,6,1],[1,25,79],[1,30,24],[1,10,31],[1,5,1],[1,9,1],[1,1,1],[1,4,1],[1,118,14],[1,18,3],[1,30,1],[1,10,3],[1,5,1],[1,5,1],[1,1,1],[1,6,1],[1,9,3],[1,6,2],[1,5,1],[1,2,2],[1,3,1],[1,7,4],[1,8,2],[1,10,2],[1,1,8],[1,41,1],[1,21,4],[1,6,1],[1,13,3],[1,5,1],[1,34,7],[1,22,1],[1,9,8],[1,5,3],[1,11,1],[1,2,1],[1,6,1],[1,4,1],[1,72,1],[1,44,3],[1,2,1],[1,1,1],[1,3,1],[1,8,2],[1,1,3],[1,14,1],[1,3,2],[1,1,1],[1,9,2],[1,17,1],[1,9,35],[1,3,1],[1,6,1],[1,2,11],[1,5,3],[1,1,1],[1,2,1],[1,14,7],[1,51,44],[1,3,6],[1,1,1],[1,6,2],[1,2,1],[1,11,2],[1,8,3],[1,3,2],[1,3,3],[1,4,1],[1,2,1],[1,5,1],[1,8,5],[1,60,1],[1,6,3],[1,36,2],[1,1,1],[1,2,1],[1,10,2],[1,26,2],[1,7,3],[1,6,1],[1,6,2],[1,3,3],[1,2,3],[1,6,2],[1,2,2],[1,2,2],[1,5,2],[1,2,1],[1,15,5],[1,1,2],[1,1,3],[1,37,24],[1,8,2],[1,17,2],[1,31,1],[1,14,2],[1,2,1],[1,16,2],[1,3,1],[1,2,2],[1,1,2],[1,2,3],[1,4,2],[1,1,1],[1,9,5],[1,1,2],[1,1,4],[1,4,18],[1,6,1],[1,12,1],[1,3,85],[1,17,2],[1,4,1],[1,7,1],[1,4,1],[1,3,1],[1,22,2],[1,1,1],[1,15,27],[1,4,1],[1,1,1],[1,1,3],[1,3,1],[1,35,2],[1,1,1],[1,33,4],[1,2,1],[1,3,3],[1,6,1],[1,9,1],[1,8,1],[1,6,1],[1,16,2],[1,20,2],[1,5,1],[1,1,5],[1,2,2],[1,12,25],[1,6,1],[1,13,1],[1,2,1],[1,2,1],[1,10,1],[1,2,1],[1,37,3],[1,2,1],[1,58,11],[1,14,3],[1,6,1],[1,6,1],[1,1,3],[1,1,1],[1,9,2],[1,1,502],[1,45,5],[1,5,1],[1,4,1],[1,2,8],[1,5,1],[1,1,1],[1,7,1],[1,4,1],[1,3,4],[1,1,1],[1,10,1],[1,9,1],[1,13,1],[1,10,8],[1,4,4],[1,7,1],[1,1,2],[1,2,2],[1,9,2],[1,13,2],[1,8,1],[1,1,1],[1,2,4],[1,29,1],[1,8,2],[1,7,3],[1,30,7],[1,1,1],[1,10,10],[1,3,1],[1,1,1],[1,5,1],[1,4,3],[1,7,1],[1,43,8],[1,1,2],[1,9,1],[1,1,1],[1,3,6],[1,9,1],[1,1,1],[1,7,1],[1,6,1],[1,2,2],[1,13,4],[1,13,3],[1,2,3],[1,8,1],[1,11,2],[1,9,53],[1,2,1],[1,16,1],[1,6,3],[1,48,3],[1,4,1],[1,7,3],[1,2,2],[1,8,1],[1,8,1],[1,26,2],[1,3,1],[1,8,2],[1,121,2],[1,2,2],[1,8,1],[1,2,2],[1,4,2],[1,8,1],[1,1,1],[1,4,1],[1,3,3],[1,7,1],[1,7,2],[1,2,1],[1,8,2],[1,34,28],[1,3,2],[1,3,1],[1,5,1],[1,9,1],[1,7,1],[1,14,4],[1,1,1],[1,34,4],[1,1,1],[1,6,1],[1,3,1],[1,2,1],[1,4,1],[1,5,2],[1,10,1],[1,41,5],[1,7,2],[1,19,4],[1,3,3],[1,12,3],[1,7,1],[1,4,2],[1,16,1],[1,3,1],[1,8,4],[1,9,2],[1,8,2],[1,2,1],[1,10,2],[1,8,1],[1,16
,2],[1,7,2],[1,5,1],[1,2,3],[1,15,4],[1,3,5],[1,4,4],[1,1,1],[1,3,2],[1,5,1],[1,8,4],[1,4,1],[1,41,7],[1,2,1],[1,1,3],[1,1,6],[1,2,1],[1,10,2],[1,10,2],[1,3,3],[1,39,4],[1,1,2],[1,5,7],[1,12,2],[1,15,5],[1,4,1],[1,13,1],[1,3,1],[1,44,3],[1,1,2],[1,1,1],[1,6,1],[1,3,1],[1,3,2],[1,7,15],[1,1,1],[1,11,4],[1,3,1],[1,1,3],[1,1,1],[1,2,1],[1,9,4],[1,22,1],[1,46,2],[1,3,18],[1,22,8],[1,3,1],[1,4,10],[1,12,16],[1,2,1],[1,8,3],[1,1,1],[1,2,4],[1,1,1],[1,6,4],[1,7,1],[1,7,4],[1,14,4],[1,1,1],[1,13,2],[1,61,1],[1,6,2],[1,16,1],[1,14,7],[1,9,2],[1,18,2],[1,9,3],[1,1,2],[1,4,1],[1,6,1],[1,6,4],[1,10,1],[1,5,2],[1,7,1],[1,3,1],[1,11,2],[1,53,1],[1,10,2],[1,17,1],[1,2,2],[1,5,14],[1,17,1],[1,2,1],[1,5,1],[1,28,2],[1,8,2],[1,4,1],[1,4,2],[1,21,1],[1,3,1],[1,3,2],[1,5,2],[1,5,1],[1,3,13],[1,13,2],[1,124,753],[1,2,2],[1,43,1],[1,6,1],[1,2,2],[1,11,1],[1,22,1],[1,5,2],[1,5,1],[1,8,1],[1,2,4],[1,2,2],[1,9,1],[1,6,1],[1,2,1],[1,6,1],[1,14,3],[1,21,1],[1,3,4],[1,3,3],[1,3,1],[1,2,2],[1,2,2],[1,5,2],[1,11,1],[1,6,1],[1,3,1],[1,64,1],[1,6,1],[1,2,12],[1,5,1],[1,6,4],[1,10,1],[1,14,1],[1,14,1],[1,2,1],[1,2,1],[1,8,4],[1,17,2],[1,5,3],[1,64,1],[1,33,3],[1,18,2],[1,1,1],[1,42,9],[1,20,2],[1,10,2],[1,2,2],[1,3,1],[1,13,1],[1,5,1],[1,39,5],[1,8,2],[1,6,1],[1,3,2],[1,12,1],[1,2,4],[1,8,1],[1,2,1],[1,4,5],[1,7,1],[1,2,1],[1,2,1],[1,5,2],[1,15,3],[1,6,1],[1,1,1],[1,11,2],[1,4,2],[1,1,1],[1,7,3],[1,7,2],[1,3,1],[1,3,1],[1,2,1],[1,8,3],[1,3,1],[1,7,12],[1,8,1],[1,4,2],[1,6,2],[1,9,1],[1,3,30],[1,8,3],[1,8,2],[1,8,1],[1,11,1],[1,13,1],[1,2,1],[1,16,1],[1,10,1],[1,3,1],[1,6,4],[1,29,2],[1,4,2],[1,4,1],[1,1,1],[1,7,1],[1,1,1],[1,4,11],[1,1,1],[1,6,1],[1,26,1],[1,3,1],[1,2,1],[1,10,1],[1,4,1],[1,14,2],[1,10,1],[1,5,2],[1,5,1],[1,2,1],[1,26,33],[1,1,1],[1,11,2],[1,8,5],[1,18,1],[1,2,1],[1,5,1],[1,4,2],[1,5,1],[1,11,2],[1,1,2],[1,2,2],[1,6,6],[1,10,1],[1,14,1],[1,2,1],[1,13,1],[1,14,1],[1,8,2],[1,21,2],[1,1,2],[1,1,1],[1,14,1],[1,2,1],[1,15,2],[1,4,1],[1,3,1],[1,10,2],[1,4,2],[1,5,1],[1,11,22],[1,8,3],[1,4,1],[1,3,2],[1,1,2],[1,25,3],[1,2,1],[1,11,2],[1,5,2],[1,39,1],[1,1,1],[1,415,128],[1,6,1],[1,5,1],[1,8,5],[1,2,3],[1,1,1],[1,1,1],[1,4,1],[1,2,4],[1,4,1],[1,2,9],[1,4,2],[1,23,3],[1,6,9],[1,5,4],[1,2,5],[1,1,1],[1,7,1],[1,3,7],[1,1,2],[1,2,16],[1,5,2],[1,1,3],[1,4,1],[1,11,1],[1,2,2],[1,2,1],[1,10,1],[1,6,2],[1,11,1],[1,28,1],[1,21,3],[1,3,2],[1,3,1],[1,4,1],[1,1,2],[1,7,1],[1,11,4],[1,4,2],[1,22,4],[1,1,1],[1,1,1],[1,12,7],[1,1,1],[1,4,2],[1,2,1],[1,6,4],[1,14,3],[1,8,2],[1,1,11],[1,13,2],[1,4,1],[1,3,2],[1,95,10],[1,1,2],[1,4,2],[1,27,2],[1,2,1],[1,19,1],[1,13,4],[1,1,1],[1,37,1],[1,4,1],[1,5,1],[1,7,5],[1,1,1],[1,4,5],[1,5,1],[1,1,1],[1,16,2],[1,22,1],[1,4,2],[1,24,4],[1,10,1],[1,77,6],[1,21,1],[1,11,1],[1,2,1],[1,1,1],[1,4,5],[1,2,4],[1,55,4],[1,17,1],[1,1,3],[1,2,2],[1,7,1],[1,17,1],[1,34,2],[1,4,1],[1,2,2],[1,1,2],[1,100,1],[1,17,2],[1,8,6],[1,11,2],[1,11,2],[1,3,1],[1,5,2],[1,1,1],[1,6,7],[1,15,5],[1,7,1],[1,4,1],[1,5,1],[1,6,2],[1,7,1],[1,2,2],[1,10,2],[1,17,1],[1,10,2],[1,6,3],[1,21,1],[1,2,1],[1,78,4],[1,6,1],[1,1,2],[1,5,1],[1,186,9],[1,16,3],[1,15,13],[1,30,4],[1,2,1],[1,15,3],[1,13,1],[1,3,1],[1,1,1],[1,2,2],[1,5,5],[1,7,1],[1,16,1],[1,2,1],[1,14,2],[1,11,5],[1,9,1],[1,13,2],[1,2,1],[1,4,64],[1,4,1],[1,18,4],[1,3,1],[1,1,1],[1,16,2],[1,4,1],[1,11,4],[1,9,3],[1,3,1],[1,4,1],[1,1,1],[1,10,3],[1,7,1],[1,13,1],[1,16,4],[1,1,16],[1,2,2],[1,18,6],[1,42,2],[1,1,3],[1,15,1],[1,3,1],[1,43,1],[1,1,1],[1,27,2],[1,1,3],[1,1,5],[1,13,1],[1,1,1],[1,10,11],[1,8,1],[1,9,1],[1,13,1],[1,1,2],[1,13,3],[1,1,1],[1,5,1],[1,14,2],[1,14,1
],[1,13,1],[1,4,3],[1,25,1],[1,1,3],[1,3,3],[1,4,1],[1,1,1],[1,4,4],[1,15,1],[1,2,1],[1,1,1],[1,7,12],[1,68,2],[1,13,2],[1,2,1],[1,6,4],[1,46,6],[1,1,1],[1,2,2],[1,4,1],[1,2,1],[1,11,5],[1,1,1],[1,9,1],[1,9,1],[1,13,1],[1,4,1],[1,14,1],[1,42,9],[1,5,1],[1,4,1],[1,24,7],[1,7,1],[1,17,1],[1,2,1],[1,2,5],[1,3,6],[1,2,1],[1,15,4],[1,3,2],[1,33,2],[1,30,4],[1,27,4],[1,1,1],[1,14,4],[1,2,3],[1,26,7],[1,22,1],[1,2,2],[1,2,2],[1,166,3],[1,4,4],[1,9,1],[1,12,15],[1,2,6],[1,13,2],[1,4,3],[1,9,2],[1,2,3],[1,3,3],[1,9,2],[1,22,1],[1,5,3],[1,3,4],[1,2,3],[1,3,1],[1,23,1],[1,18,1],[1,6,1],[1,4,1],[1,9,3],[1,35,1],[1,73,2],[1,1,3],[1,31,5],[1,25,1],[1,3,4],[1,11,1],[1,9,4],[1,2,1],[1,27,36],[1,23,5],[1,4,2],[1,1,2],[1,29,2],[1,3,2],[1,1,1],[1,4,1],[1,12,1],[1,36,16],[1,5,14],[1,19,1],[1,6,1],[1,6,1],[1,4,1],[1,6,1],[1,4,2],[1,9,7],[1,7,1],[1,30,4],[1,4,1],[1,18,3],[1,2,2],[1,3,1],[1,9,2],[1,2,2],[1,1,2],[1,1,2],[1,14,1],[1,3,1],[1,5,2],[1,10,1],[1,9,1],[1,10,3],[1,4,1],[1,2,1],[1,4,4],[1,2,1],[1,3,3],[1,39,2],[1,3,1],[1,1,3],[1,14,1],[1,2,4],[1,13,1],[1,4,6],[1,3,5],[1,5,4],[1,8,1],[1,131,1],[1,28,1],[1,5,1],[1,8,5],[1,2,9],[1,4,2],[1,5,1],[1,46,3],[1,7,3],[1,1,1],[1,7,3],[1,2,1],[1,4,1],[1,2,1],[1,2,1],[1,2,1],[1,4,6],[1,5,1],[1,9,3],[1,2,2],[1,9,1],[1,42,3],[1,11,3],[1,5,1],[1,1,2],[1,6,1],[1,37,51],[1,2,1],[1,4,3],[1,23,2],[1,1,15],[1,5,4],[1,1,4],[1,18,3],[1,12,3],[1,4,2],[1,4,1],[1,2,7],[1,2,6],[1,3,6],[1,6,1],[1,10,3],[1,4,2],[1,1,2],[1,4,1],[1,4,3],[1,1,3],[1,3,1],[1,6,2],[1,10,2],[1,6,4],[1,4,3],[1,7,2],[1,2,2],[1,4,1],[1,1,1],[1,4,5],[1,14,1],[1,20,4],[1,7,15],[1,18,2],[1,6,1],[1,1,1],[1,7,1],[1,5,2],[1,6,2],[1,4,1],[1,6,3],[1,2,1],[1,6,1],[1,4,1],[1,7,1],[1,7,4],[1,7,1],[1,1,1],[1,24,4],[1,2,2],[1,3,5],[1,8,1],[1,15,2],[1,5,1],[1,2,3],[1,2,2],[1,4,1],[1,6,1],[1,2,3],[1,11,1],[1,23,5],[1,2,2],[1,1,1],[1,8,1],[1,17,6],[1,1,1],[1,9,2],[1,1,1],[1,10,1],[1,5,1],[1,6,1],[1,6,1],[1,5,1],[1,2,6],[1,2,1],[1,9,1],[1,14,1],[1,18,8],[1,39,2],[1,13,1],[1,6,1],[1,6,2],[1,9,1],[1,14,1],[1,5,4],[1,26,2],[1,4,1],[1,7,2],[1,5,5],[1,2,1],[1,20,2],[1,14,1],[1,10,1],[1,4,1],[1,3,1],[1,10,2],[1,9,12],[1,4,4],[1,2,1],[1,4,1],[1,4,1],[1,2,1],[1,8,1],[1,2,4],[1,1,1],[1,33,2],[1,4,1],[1,5,1],[1,205,1],[1,2,1],[1,15,3],[1,5,1],[1,1,1],[1,1,1],[1,1,1],[1,13,1],[1,14,5],[1,6,4],[1,3,1],[1,7,5],[1,42,2],[1,11,1],[1,24,2],[1,11,2],[1,11,2],[1,12,1],[1,7,1],[1,1,1],[1,3,2],[1,21,1],[1,13,1],[1,2,1],[1,37,6],[1,8,4],[1,2,2],[1,2,2],[1,36,1],[1,8,1],[1,19,11],[1,19,7],[1,8,1],[1,18,2],[1,7,2],[1,8,1],[1,1,1],[1,4,1],[1,3,3],[1,10,1],[1,6,1],[1,4,1],[1,10,1],[1,25,1],[1,14,1],[1,14,3],[1,4,1],[1,2,1],[1,2,2],[1,4,2],[1,3,4],[1,62,11],[1,4,1],[1,39,3],[1,65,2],[1,3,1],[1,11,2],[1,4,1],[1,2,2],[1,1,1],[1,2,3],[1,2,1],[1,17,7],[1,7,4],[1,1,4],[1,62,3],[1,17,3],[1,26,3],[1,15,1],[1,2,1],[1,4,6],[1,1,2],[1,8,2],[1,16,2],[1,1,1],[1,7,2],[1,4,1],[1,1,1],[1,7,2],[1,8,2],[1,12,1],[1,1,2],[1,2,1],[1,2,1],[1,26,7],[1,2,1],[1,5,1],[1,5,1],[1,5,1],[1,1,1],[1,6,27],[1,5,4],[1,6,1],[1,8,1],[1,38,2],[1,26,2],[1,13,1],[1,20,2],[1,6,6],[1,2,2],[1,2,1],[1,16,2],[1,88,1],[1,4,1],[1,5,3],[1,1,4],[1,1,4],[1,12,2],[1,3,1],[1,3,1],[1,3,1],[1,2,3],[1,6,1],[1,2,4],[1,28,2],[1,17,3],[1,10,1],[1,51,3],[1,1,1],[1,15,4],[1,10,14],[1,1,3],[1,3,3],[1,1,1],[1,5,1],[1,3,1],[1,23,3],[1,10,1],[1,1,1],[1,21,6],[1,11,1],[1,8,1],[1,1,1],[1,2,1],[1,1,3],[1,26,1],[1,1,2],[1,4,1],[1,4,1],[1,6,1],[1,6,1],[1,2,2],[1,11,5],[1,15,2],[1,13,1],[1,2,2],[1,4,1],[1,4,1],[1,2,6],[1,13,3],[1,23,2],[1,18,2],[1,8,2],[1,1,1],[1,4,1],[1,7,1],[1,2,1],[1,8,6],[1,12,1],[1,23,4],[1,9,4],[1
,2,2],[1,8,1],[1,7,2],[1,2,2],[1,2,4],[1,8,16],[1,22,3],[1,2,1],[1,2,4],[1,2,1],[1,9,2],[1,3,3],[1,4,1],[1,3,9],[1,3,1],[1,2,2],[1,2,3],[1,11,1],[1,5,1],[1,5,1],[1,2,2],[1,10,20],[1,2,2],[1,2,1],[1,3,3],[1,10,1],[1,2,3],[1,2,1],[1,5,1],[1,4,2],[1,8,1],[1,2,2],[1,6,1],[1,5,1],[1,9,1],[1,3,2],[1,1,1],[1,2,6],[1,1,1],[1,5,1],[1,2,1],[1,16,1],[1,6,1],[1,2,1],[1,2,1],[1,5,1],[1,9,1],[1,10,16],[1,4,1],[1,4,2],[1,5,2],[1,8,1],[1,16,2],[1,2,1],[1,5,1],[1,1,2],[1,55,2],[1,20,1],[1,11,1],[1,5,2],[1,13,1],[1,1,1],[1,10,6],[1,5,2],[1,21,1],[1,7,3],[1,5,1],[1,7,1],[1,3,1],[1,6,1],[1,46,3],[1,8,5],[1,5,1],[1,2,1],[1,2,6],[1,22,1],[1,42,1],[1,1,1],[1,4,2],[1,13,1],[1,3,3],[1,2,2],[1,4,2],[1,1,3],[1,88,1],[1,24,4],[1,4,1],[1,3,1],[1,5,1],[1,17,6],[1,6,2],[1,20,3],[1,47,2],[1,2,7],[1,13,1],[1,1,3],[1,1,2],[1,2,2],[1,2,2],[1,4,3],[1,7,1],[1,3,1],[1,10,1],[1,2,1],[1,2,5],[1,1,2],[1,17,2],[1,12,4],[1,24,1],[1,3,1],[1,1,3],[1,6,1],[1,2,5],[1,3,1],[1,1,1],[1,13,2],[1,6,1],[1,2,1],[1,10,2],[1,4,1],[1,1,1],[1,18,7],[1,7,2],[1,8,1],[1,5,1],[1,2,1],[1,4,1],[1,2,2],[1,14,1],[1,13,1],[1,10,4],[1,4,4],[1,6,4],[1,4,1],[1,16,2],[1,8,2],[1,3,3],[1,3,1],[1,21,2],[1,7,1],[1,2,1],[1,2,1],[1,2,3],[1,4,1],[1,6,1],[1,28,1],[1,2,7],[1,3,1],[1,23,4],[1,2,1],[1,6,1],[1,2,1],[1,4,1],[1,3,2],[1,1,1],[1,9,2],[1,9,2],[1,2,1],[1,4,2],[1,10,1],[1,12,1],[1,4,2],[1,7,1],[1,2,2],[1,9,1],[1,16,5],[1,31,2],[1,16,2],[1,22,3],[1,2,1],[1,6,1],[1,1,1],[1,6,3],[1,14,2],[1,5,3],[1,81,3],[1,8,2],[1,1,1],[1,61,9],[1,1,4],[1,2,1],[1,11,3],[1,3,5],[1,3,6],[1,4,7],[1,1,2],[1,5,2],[1,2,1],[1,3,2],[1,9,5],[1,9,1],[1,1,3],[1,3,2],[1,13,3],[1,14,1],[1,15,6],[1,6,1],[1,2,1],[1,7,1],[1,2,1],[1,10,2],[1,2,2],[1,14,1],[1,2,2],[1,3,3],[1,3,1],[1,4,1],[1,59,2],[1,5,2],[1,4,2],[1,1,1],[1,2,1],[1,4,1],[1,2,2],[1,5,4],[1,4,1],[1,4,1],[1,10,3],[1,2,2],[1,2,3],[1,8,1],[1,2,1],[1,1,1],[1,18,1],[1,6,1],[1,12,3],[1,5,3],[1,3,1],[1,7,3],[1,10,2],[1,2,23],[1,1,12],[1,1,1],[1,32,3],[1,2,1],[1,4,1],[1,12,2],[1,4,1],[1,3,1],[1,5,1],[1,4,2],[1,4,1],[1,16,2],[1,1,1],[1,4,1],[1,7,1],[1,2,4],[1,8,1],[1,4,4],[1,1,1],[1,1,2],[1,6,3],[1,8,2],[1,23,15],[1,2,2],[1,2,1],[1,2,1],[1,11,1],[1,3,2],[1,9,2],[1,4,2],[1,2,3],[1,34,1],[1,7,1],[1,2,4],[1,65,2],[1,41,3],[1,1,2],[1,1,1],[1,6,1],[1,6,1],[1,7,1],[1,3,1],[1,14,9],[1,6,1],[1,6,5],[1,2,13],[1,5,2],[1,2,1],[1,4,1],[1,17,1],[1,5,1],[1,1,1],[1,3,2],[1,9,1],[1,1,4],[1,48,2],[1,7,1],[1,4,1],[1,3,1],[1,4,2],[1,118,3],[1,2,1],[1,2,4],[1,2,1],[1,12,13],[1,2,1],[1,4,2],[1,4,1],[1,6,1],[1,1,1],[1,7,2],[1,10,1],[1,21,5],[1,5,2],[1,9,1],[1,2,2],[1,1,1],[1,1,1],[1,1,1],[1,3,1],[1,1,1],[1,7,1],[1,83,9],[1,6,2],[1,7,2],[1,13,1],[1,4,2],[1,3,1],[1,8,2],[1,2,1],[1,10,3],[1,2,1],[1,2,1],[1,9,11],[1,2,1],[1,3,1],[1,17,1],[1,7,2],[1,8,2],[1,20,1],[1,2,1],[1,1,2],[1,8,1],[1,2,1],[1,6,1],[1,21,3],[1,1,2],[1,5,5],[1,2,1],[1,2,3],[1,2,1],[1,2,2],[1,16,1],[1,2,1],[1,2,1],[1,3,1],[1,17,1],[1,6,1],[1,4,15],[1,1,1],[1,11,1],[1,84,15],[1,31,3],[1,2,2],[1,8,1],[1,9,1],[1,2,3],[1,15,2],[1,4,1],[1,18,1],[1,3,1],[1,1,1],[1,2,4],[1,2,2],[1,2,1],[1,2,1],[1,25,1],[1,3,1],[1,141,13],[1,4,2],[1,2,2],[1,14,2],[1,7,1],[1,30,9],[1,17,1],[1,1,2],[1,6,1],[1,2,1],[1,2,1],[1,8,1],[1,2,1],[1,10,1],[1,6,3],[1,12,1],[1,68,1],[1,2,1],[1,10,2],[1,14,2],[1,26,9],[1,7,3],[1,3,3],[1,6,6],[1,3,1],[1,18,4],[1,3,1],[1,4,4],[1,2,1],[1,1,1],[1,37,8],[1,8,6],[1,2,1],[1,9,6],[1,5,2],[1,3,1],[1,3,2],[1,2,1],[1,3,1],[1,13,7],[1,9,1],[1,122,2],[1,2,1],[1,22,6],[1,11,2],[1,16,2],[1,28,46],[1,2,4],[1,7,1],[1,2,3],[1,2,6],[1,2,2],[1,1,2],[1,1,1],[1,5,1],[1,1,2],[1,3,2],[1,7,6],[1,11,1],[1,21,1],[1,
40,6],[1,14,2],[1,21,1],[1,1,1],[1,14,2],[1,21,1],[1,2,1],[1,1,1],[1,1,2],[1,40,2],[1,4,2],[1,1,3],[1,1,1],[1,107,2],[1,4,6],[1,136,6],[1,5,1],[1,9,1],[1,24,3],[1,7,1],[1,10,5],[1,29,3],[1,12,2],[1,10,3],[1,5,3],[1,2,1],[1,59,1],[1,5,2],[1,13,2],[1,1,2],[1,50,2],[1,1,3],[1,2,3],[1,6,1],[1,4,2],[1,5,4],[1,3,2],[1,8,1],[1,4,2],[1,1,1],[1,17,1],[1,13,3],[1,2,1],[1,7,1],[1,3,1],[1,8,1],[1,1,1],[1,20,1],[1,4,4],[1,1,2],[1,2,1],[1,2,1],[1,2,2],[1,1,2],[1,13,2],[1,4,1],[1,4,1],[1,3,1],[1,2,1],[1,4,4],[1,13,5],[1,9,1],[1,8,1],[1,12,1],[1,15,3],[1,2,1],[1,2,2],[1,4,1],[1,2,2],[1,1,1],[1,3,1],[1,13,1],[1,4,1],[1,9,4],[1,3,2],[1,2,1],[1,4,4],[1,1,3],[1,15,1],[1,4,1],[1,2,1],[1,3,1],[1,2,1],[1,3,6],[1,5,1],[1,7,10],[1,1,2],[1,6,2],[1,7,2],[1,3,1],[1,3,3],[1,6,1],[1,13,1],[1,22,3],[1,6,5],[1,6,1],[1,3,1],[1,3,1],[1,21,5],[1,11,2],[1,6,3],[1,38,4],[1,6,4],[1,4,1],[1,2,1],[1,5,5],[1,5,3],[1,40,1],[1,4,3],[1,8,1],[1,13,2],[1,4,2],[1,1,1],[1,9,9],[1,1,1],[1,12,2],[1,36,1],[1,2,1],[1,18,3],[1,28,1],[1,5,1],[1,20,4],[1,40,3],[1,3,1],[1,5,3],[1,2,1],[1,31,3],[1,6,1],[1,3,1],[1,1,5],[1,3,3],[1,36,1],[1,1,1],[1,22,2],[1,9,2],[1,2,4],[1,2,2],[1,4,4],[1,2,1],[1,6,1],[1,3,3],[1,5,1],[1,13,2],[1,4,1],[1,1,3],[1,1,1],[1,11,5],[1,4,1],[1,2,3],[1,26,1],[1,9,1],[1,6,1],[1,15,1],[1,23,5],[1,3,5],[1,4,3],[1,8,1],[1,9,4],[1,2,1],[1,7,1],[1,1,6],[1,4,1],[1,43,1],[1,2,3],[1,1,1],[1,15,4],[1,3,1],[1,1,1],[1,10,1],[1,79,1],[1,1,14],[1,2,1],[1,6,1],[1,1,1],[1,24,1],[1,2,3],[1,9,2],[1,2,3],[1,8,1],[1,115,15],[1,1,1],[1,1,2],[1,3,1],[1,9,24],[1,6,1],[1,3,6],[1,10,3],[1,3,1],[1,1,1],[1,3,2],[1,2,1],[1,11,1],[1,5,1],[1,1,1],[1,2,1],[1,3,1],[1,5,1],[1,11,1],[1,2,1],[1,7,7],[1,15,1],[1,6,2],[1,51,7],[1,2,1],[1,54,1],[1,5,1],[1,1,1],[1,7,5],[1,1,1],[1,4,1],[1,3,1],[1,22,4],[1,5,3],[1,5,1],[1,64,9],[1,6,1],[1,28,6],[1,5,1],[1,11,1],[1,2,2],[1,4,2],[1,1,4],[1,8,1],[1,1,5],[1,7,1],[1,2,1],[1,2,2],[1,8,1],[1,11,3],[1,8,3],[1,7,1],[1,10,5],[1,5,1],[1,98,5],[1,18,1],[1,1,1],[1,5,1],[1,2,2],[1,14,2],[1,3,1],[1,1,1],[1,11,3],[1,7,9],[1,5,3],[1,3,1],[1,3,3],[1,125,34],[1,1,1],[1,2,1],[1,6,2],[1,2,2],[1,11,7],[1,5,2],[1,5,5],[1,6,1],[1,10,2],[1,14,2],[1,4,3],[1,8,7],[1,2,3],[1,2,2],[1,13,1],[1,6,1],[1,10,5],[1,11,1],[1,4,2],[1,14,1],[1,1,6],[1,15,1],[1,1,3],[1,5,3],[1,7,1],[1,2,1],[1,1,3],[1,2,4],[1,3,1],[1,8,3],[1,2,3],[1,2,1],[1,2,2],[1,2,1],[1,4,1],[1,16,2],[1,1,2],[1,1,5],[1,7,1],[1,3,1],[1,2,1],[1,16,3],[1,4,1],[1,8,2],[1,16,6],[1,12,2],[1,84,26],[1,10,2],[1,2,2],[1,5,1],[1,1,1],[1,8,1],[1,4,1],[1,4,1],[1,4,2],[1,4,1],[1,4,10],[1,14,2],[1,4,2],[1,5,2],[1,19,1],[1,4,3],[1,8,2],[1,6,1],[1,2,5],[1,2,1],[1,16,4],[1,4,1],[1,2,2],[1,7,1],[1,4,2],[1,4,1],[1,8,1],[1,10,2],[1,3,2],[1,3,1],[1,10,2],[1,1,1],[1,12,3],[1,37,1],[1,10,1],[1,16,4],[1,1,1],[1,11,1],[1,4,1],[1,8,6],[1,3,2],[1,66,2],[1,14,1],[1,2,4],[1,2,2],[1,7,2],[1,24,2],[1,5,1],[1,1,1],[1,1,1],[1,3,1],[1,31,2],[1,24,1],[1,8,5],[1,8,2],[1,3,4],[1,64,1],[1,1,4],[1,4,47],[1,8,4],[1,25,1],[1,19,2],[1,4,1],[1,33,4],[1,16,2],[1,4,1],[1,1,1],[1,2,3],[1,27,1],[1,20,1],[1,10,3],[1,2,1],[1,2,1],[1,76,1],[1,2,1],[1,5,1],[1,2,2],[1,15,3],[1,40,2],[1,4,22],[1,2,2],[1,2,2],[1,10,1],[1,3,1],[1,55,4],[1,2,7],[1,7,1],[1,4,6],[1,2,1],[1,2,1],[1,28,1],[1,2,2],[1,6,2],[1,6,2],[1,4,15],[1,3,2],[1,1,1],[1,29,1],[1,13,1],[1,16,1],[1,4,1],[1,7,7],[1,3,3],[1,16,4],[1,12,11],[1,1,1],[1,2,4],[1,54,2],[1,1,2],[1,6,2],[1,1,3],[1,2,2],[1,1,1],[1,2,1],[1,11,4],[1,9,1],[1,20,1],[1,1,1],[1,17,3],[1,1,1],[1,9,2],[1,2,2],[1,3,1],[1,29,19],[1,28,1],[1,8,3],[1,21,8],[1,7,3],[1,6,2],[1,5,2],[1,11,1],[1,1,2],[1,7,1],[1,22,1],
[1,9,1],[1,3,3],[1,8,2],[1,5,1],[1,23,2],[1,11,5],[1,17,2],[1,5,5],[1,4,3],[1,33,1],[1,2,3],[1,6,1],[1,32,1],[1,6,2],[1,64,2],[1,3,1],[1,7,1],[1,3,6],[1,12,1],[1,1,1],[1,9,1],[1,38,3],[1,1,1],[1,3,1],[1,3,5],[1,78,16],[1,3,1],[1,7,1],[1,26,1],[1,9,2],[1,113,2],[1,9,1],[1,5,9],[1,3,2],[1,4,1],[1,2,1],[1,5,1],[1,24,3],[1,11,4],[1,38,2],[1,13,3],[1,7,3],[1,1,1],[1,1,2],[1,3,3],[1,5,3],[1,6,1],[1,7,1],[1,3,1],[1,4,2],[1,3,1],[1,3,1],[1,1,2],[1,2,1],[1,18,8],[1,1,3],[1,1,1],[1,2,5],[1,13,9],[1,2,2],[1,6,1],[1,5,1],[1,13,3],[1,7,1],[1,3,2],[1,2,1],[1,4,1],[1,2,2],[1,6,2],[1,4,3],[1,1,3],[1,3,2],[1,12,8],[1,6,1],[1,7,1],[1,6,3],[1,9,4],[1,16,17],[1,1,2],[1,4,1],[1,2,1],[1,2,1],[1,2,1],[1,1,1],[1,4,2],[1,4,1],[1,8,1],[1,14,17],[1,7,1],[1,7,6],[1,5,1],[1,4,2],[1,80,2],[1,13,1],[1,11,1],[1,9,1],[1,2,4],[1,3,1],[1,2,1],[1,5,2],[1,3,1],[1,1,2],[1,12,1],[1,8,5],[1,6,3],[1,17,1],[1,3,4],[1,1,2],[1,5,2],[1,1,3],[1,2,2],[1,2,3],[1,2,1],[1,4,1],[1,1,1],[1,14,1],[1,2,1],[1,16,4],[1,15,2],[1,3,3],[1,8,8],[1,6,1],[1,25,4],[1,6,1],[1,7,3],[1,36,2],[1,2,1],[1,32,2],[1,1,1],[1,7,1],[1,14,2],[1,21,1],[1,3,1],[1,27,7],[1,6,3],[1,1,5],[1,5,4],[1,12,2],[1,2,1],[1,2,1],[1,8,7],[1,8,8],[1,7,1],[1,2,1],[1,4,1],[1,1,7],[1,10,3],[1,17,1],[1,1,1],[1,8,6],[1,29,5],[1,12,2],[1,7,2],[1,7,1],[1,2,2],[1,2,1],[1,2,1],[1,54,9],[1,1,1],[1,12,2],[1,8,1],[1,8,4],[1,39,1],[1,3,3],[1,9,4],[1,6,5],[1,2,1],[1,15,2],[1,18,1],[1,2,2],[1,1,1],[1,1,1],[1,2,4],[1,3,1],[1,6,1],[1,3,3],[1,4,3],[1,3,2],[1,1,1],[1,2,2],[1,16,12],[1,4,2],[1,15,2],[1,6,1],[1,7,1],[1,9,8],[1,70,2],[1,5,1],[1,4,3],[1,24,4],[1,8,6],[1,18,43],[1,23,3],[1,10,1],[1,14,8],[1,6,4],[1,2,1],[1,2,1],[1,1,1],[1,2,1],[1,9,3],[1,6,4],[1,5,3],[1,43,2],[1,5,1],[1,11,1],[1,1,2],[1,5,3],[1,4,2],[1,16,2],[1,16,10],[1,5,1],[1,2,2],[1,2,1],[1,2,3],[1,4,6],[1,3,12],[1,6,1],[1,10,1],[1,1,2],[1,13,1],[1,3,1],[1,5,2],[1,6,1],[1,3,1],[1,2,1],[1,1,1],[1,13,1],[1,20,1],[1,20,2],[1,8,1],[1,5,2],[1,2,2],[1,10,5],[1,1,3],[1,7,2],[1,4,1],[1,15,18],[1,1,4],[1,5,2],[1,4,1],[1,1,11],[1,1,3],[1,4,1],[1,1,1],[1,2,1],[1,2,12],[1,5,1],[1,3,1],[1,25,2],[1,16,1],[1,10,1],[1,18,1],[1,28,3],[1,5,6],[1,4,2],[1,2,2],[1,51,124],[1,4,2],[1,5,1],[1,28,1],[1,4,5],[1,6,2],[1,20,1],[1,7,1],[1,5,3],[1,11,1],[1,4,3],[1,1,1],[1,6,3],[1,5,1],[1,3,1],[1,10,2],[1,64,5],[1,12,12],[1,5,2],[1,6,1],[1,8,2],[1,28,8],[1,19,1],[1,2,1],[1,1,1],[2,6,1],[2,2,2],[2,4,5],[2,11,1],[2,4,1],[2,4,1],[2,14,1],[2,19,2],[2,2,1],[2,6,4],[2,2,1],[2,6,2],[2,4,1],[2,12,2],[2,15,2],[2,5,1],[2,11,1],[2,11,1],[2,2,2],[2,3,3],[2,5,9],[2,2,1],[2,1,1],[2,1,4],[2,2,1],[2,4,1],[2,11,1],[2,6,1],[2,2,2],[2,8,1],[2,81,7],[2,8,1],[2,5,1],[2,6,3],[2,2,2],[2,39,1],[2,5,2],[2,5,2],[2,2,4],[2,10,2],[2,4,2],[2,2,1],[2,6,6],[2,8,2],[2,56,1],[2,9,1],[2,1,1],[2,16,3],[2,5,2],[2,3,2],[2,12,25],[2,4,4],[2,6,2],[2,7,1],[2,30,11],[2,4,1],[2,16,5],[2,8,2],[2,7,2],[2,11,1],[2,7,1],[2,2,1],[2,1,1],[2,2,9],[2,39,6],[2,2,1],[2,2,1],[2,7,1],[2,19,1],[2,11,2],[2,8,2],[2,4,7],[2,2,1],[2,7,1],[2,1,1],[2,4,1],[2,6,1],[2,6,1],[2,2,4],[2,26,37],[2,2,1],[2,13,2],[2,35,10],[2,13,1],[2,6,1],[2,10,2],[2,19,9],[2,7,1],[2,7,1],[2,2,2],[2,1,1],[2,5,2],[2,10,2],[2,6,1],[2,6,1],[2,6,1],[2,2,2],[2,1,1],[2,6,60],[2,8,1],[2,18,1],[2,4,2],[2,1,1],[2,1,1],[2,2,3],[2,21,2],[2,7,2],[2,11,3],[2,14,2],[2,3,2],[2,12,1],[2,1,2],[2,34,1],[2,1,1],[2,16,1],[2,1,1],[2,11,1],[2,14,1],[2,8,1],[2,9,1],[2,8,1],[2,3,1],[2,4,4],[2,4,1],[2,44,3],[2,4,1],[2,19,6],[2,19,2],[2,3,2],[2,17,2],[2,17,4],[2,1,6],[2,5,3],[2,27,6],[2,5,3],[2,6,3],[2,22,2],[2,22,3],[2,13,19],[2,8,1],[2,2,2],[2,7,1],[2,9,3],[2,2,1],[2,1
1,1],[2,8,1],[2,4,1],[2,8,2],[2,4,1],[2,1,1],[2,16,1],[2,2,1],[2,4,1],[2,9,11],[2,3,3],[2,3,1],[2,1,2],[2,3,1],[2,28,1],[2,8,5],[2,6,2],[2,8,1],[2,1,1],[2,10,1],[2,6,1],[2,55,1],[2,1,1],[2,4,2],[2,3,2],[2,16,4],[2,11,1],[2,2,3],[2,15,1],[2,1,10],[2,8,2],[2,15,1],[2,1,1],[2,7,114],[2,10,3],[2,1,1],[2,5,1],[2,3,3],[2,2,1],[2,1,1],[2,8,1],[2,96,1],[2,10,3],[2,3,2],[2,2,1],[2,1,1],[2,3,1],[2,25,2],[2,3,1],[2,12,4],[2,2,9],[2,3,1],[2,2,1],[2,9,1],[2,12,1],[2,18,1],[2,23,6],[2,9,85],[2,2,8],[2,1,2],[2,26,1],[2,8,2],[2,6,3],[2,1,4],[2,6,1],[2,8,3],[2,9,2],[2,1,1],[2,7,1],[2,1,3],[2,7,1],[2,3,2],[2,10,1],[2,2,2],[2,8,2],[2,4,4],[2,23,2],[2,8,5],[2,1,1],[2,3,3],[2,7,2],[2,1,1],[2,2,1],[2,1,7],[2,10,1],[2,18,1],[2,39,5],[2,13,2],[2,7,2],[2,6,2],[2,9,1],[2,5,1],[2,7,1],[2,35,2],[2,2,2],[2,5,2],[2,1,1],[2,9,2],[2,18,1],[2,2,3],[2,35,1],[2,6,5],[2,2,2],[2,2,1],[2,12,2],[2,1,1],[2,10,1],[2,6,1],[2,2,1],[2,15,2],[2,7,1],[2,5,4],[2,4,1],[2,2,14],[2,2,1],[2,5,3],[2,21,2],[2,10,1],[2,2,1],[2,8,1],[2,16,1],[2,9,2],[2,11,2],[2,1,6],[2,12,2],[2,18,2],[2,2,4],[2,4,3],[2,7,11],[2,3,1],[2,28,5],[2,1,4],[2,8,1],[2,2,5],[2,2,1],[2,3,1],[2,10,2],[2,3,3],[2,2,1],[2,17,1],[2,6,1],[2,16,1],[2,10,16],[2,17,1],[2,4,2],[2,1,1],[2,3,3],[2,7,3],[2,5,1],[2,11,1],[2,13,1],[2,3,1],[2,6,1],[2,5,2],[2,17,2],[2,33,13],[2,2,10],[2,3,5],[2,4,3],[2,5,1],[2,2,4],[2,8,2],[2,14,1],[2,16,1],[2,2,3],[2,19,6],[2,5,1],[2,8,2],[2,7,1],[2,1,1],[2,11,1],[2,2,2],[2,11,10],[2,10,1],[2,14,1],[2,1,7],[2,10,1],[2,34,1],[2,2,1],[2,2,4],[2,9,2],[2,16,1],[2,2,4],[2,8,3],[2,1,2],[2,3,5],[2,13,5],[2,20,1],[2,25,8],[2,9,1],[2,1,1],[2,15,3],[2,6,2],[2,394,278],[2,11,2],[2,1,1],[2,3,15],[2,4,2],[2,3,6],[2,6,3],[2,1,12],[2,2,1],[2,1,3],[2,11,2],[2,20,3],[2,31,9],[2,25,7],[2,15,2],[2,11,31],[2,17,2],[2,5,1],[2,2,2],[2,4,1],[2,6,2],[2,27,2],[2,10,2],[2,1,2],[2,26,5],[2,5,14],[2,12,2],[2,5,2],[2,2,1],[2,2,3],[2,6,1],[2,1,3],[2,9,3],[2,18,1],[2,5,5],[2,29,13],[2,14,1],[2,1,4],[2,3,1],[2,5,1],[2,19,4],[2,11,7],[2,8,3],[2,18,1],[2,3,5],[2,11,1],[2,4,1],[2,10,4],[2,19,2],[2,10,3],[2,12,2],[2,19,9],[2,73,3],[2,13,3],[2,12,1],[2,4,5],[2,55,1],[2,6,6],[2,27,2],[2,2,1],[2,20,1],[2,8,1],[2,1,1],[2,29,2],[2,10,8],[2,5,2],[2,10,2],[2,14,1],[2,10,1],[2,1,1],[2,4,2],[2,5,1],[2,1,4],[2,4,2],[2,9,1],[2,9,4],[2,2,1],[2,4,1],[2,6,2],[2,2,2],[2,10,15],[2,17,1],[2,9,1],[2,9,1],[2,8,2],[2,4,1],[2,4,1],[2,243,2],[2,9,3],[2,12,2],[2,4,3],[2,2,1],[2,1,2],[2,57,4],[2,7,2],[2,8,2],[2,14,2],[2,2,1],[2,6,1],[2,7,2],[2,8,1],[2,4,3],[2,36,5],[2,3,1],[2,1,1],[2,45,8],[2,1,1],[2,2,3],[2,9,1],[2,1,1],[2,13,2],[2,44,6],[2,2,1],[2,36,1],[2,4,1],[2,5,1],[2,3,2],[2,1,1],[2,28,2],[2,9,1],[2,3,3],[2,10,2],[2,16,1],[2,1,1],[2,1,1],[2,13,1],[2,14,3],[2,65,1],[2,7,1],[2,2,1],[2,11,8],[2,4,1],[2,17,1],[2,6,1],[2,15,5],[2,15,1],[2,17,2],[2,8,1],[2,8,1],[2,1,2],[2,5,7],[2,1,1],[2,3,2],[2,2,1],[2,4,1],[2,32,1],[2,3,1],[2,1,1],[2,1,1],[2,2,2],[2,2,1],[2,8,2],[2,11,3],[2,2,3],[2,42,3],[2,5,1],[2,6,2],[2,1,1],[2,9,1],[2,2,2],[2,5,1],[2,2,1],[2,7,1],[2,7,6],[2,6,2],[2,3,1],[2,1,3],[2,15,1],[2,23,1],[2,1,1],[2,3,1],[2,4,2],[2,8,1],[2,2,7],[2,3,4],[2,6,5],[2,4,1],[2,5,3],[2,16,5],[2,11,1],[2,13,1],[2,22,3],[2,10,5],[2,2,2],[2,2,2],[2,6,1],[2,7,1],[2,4,2],[2,4,3],[2,7,3],[2,7,4],[2,1,1],[2,71,9],[2,4,8],[2,33,4],[2,16,2],[2,1,18],[2,15,1],[2,3,1],[2,8,1],[2,6,3],[2,4,2],[2,1,1],[2,7,2],[2,2,8],[2,2,1],[2,8,1],[2,1,3],[2,5,1],[2,2,2],[2,11,1],[2,17,3],[2,118,1],[2,8,4],[2,14,1],[2,3,4],[2,14,1],[2,2,2],[2,4,3],[2,2,1],[2,11,1],[2,8,10],[2,1,2],[2,3,3],[2,2,2],[2,12,1],[2,2,2],[2,26,3],[2,3,2],[2,3,3],[2,19,1],[2
,1,13],[2,23,2],[2,3,1],[2,7,4],[2,10,4],[2,2,3],[2,71,3],[2,3,3],[2,23,1],[2,1,1],[2,34,3],[2,62,1],[2,4,1],[2,7,2],[2,2,8],[2,6,1],[2,20,3],[2,26,2],[2,5,2],[2,2,1],[2,7,1],[2,1,1],[2,7,2],[2,28,7],[2,4,1],[2,2,2],[2,4,1],[2,7,1],[2,2,3],[2,3,1],[2,8,3],[2,43,1],[2,2,1],[2,1,4],[2,2,1],[2,13,3],[2,4,2],[2,6,1],[2,17,1],[2,2,8],[2,32,1],[2,11,2],[2,5,2],[2,45,3],[2,9,1],[2,14,2],[2,9,1],[2,2,1],[2,10,5],[2,2,1],[2,13,1],[2,2,2],[2,3,5],[2,2,1],[2,17,3],[2,11,1],[2,15,1],[2,13,4],[2,7,7],[2,10,2],[2,6,4],[2,2,3],[2,1,3],[2,27,2],[2,2,3],[2,2,1],[2,3,1],[2,3,9],[2,3,46],[2,11,1],[2,30,1],[2,5,1],[2,8,8],[2,2,1],[2,1,1],[2,2,1],[2,6,7],[2,1,1],[2,4,1],[2,4,2],[2,15,2],[2,6,7],[2,4,2],[2,5,1],[2,1,4],[2,2,3],[2,1,2],[2,2,2],[2,1,7],[2,15,2],[2,18,3],[2,2,1],[2,6,1],[2,8,1],[2,134,20],[2,26,1],[2,2,2],[2,8,4],[2,1,1],[2,3,1],[2,14,1],[2,3,1],[2,26,1],[2,19,1],[2,1,1],[2,1,1],[2,7,1],[2,5,2],[2,5,8],[2,3,4],[2,1,1],[2,2,2],[2,16,1],[2,7,2],[2,6,1],[2,1,6],[2,4,3],[2,2,2],[2,2,2],[2,2,1],[2,2,1],[2,1,2],[2,8,3],[2,4,1],[2,9,1],[2,18,33],[2,14,1],[2,1,1],[2,3,2],[2,7,1],[2,14,4],[2,4,2],[2,31,7],[2,19,2],[2,11,4],[2,2,1],[2,7,2],[2,2,1],[2,2,3],[2,52,4],[2,4,1],[2,1,1],[2,4,3],[2,11,1],[2,3,2],[2,6,1],[2,10,3],[2,6,1],[2,12,1],[2,10,2],[2,4,2],[2,23,2],[2,3,3],[2,8,1],[2,21,6],[2,2,2],[2,1,1],[2,1,1],[2,16,3],[2,9,2],[2,5,1],[2,2,2],[2,1,4],[2,4,1],[2,1,25],[2,24,2],[2,6,1],[2,3,4],[2,10,4],[2,6,2],[2,35,2],[2,2,2],[2,1,1],[2,25,10],[2,8,1],[2,1,2],[2,1,1],[2,2,1],[2,3,8],[2,2,1],[2,2,1],[2,5,2],[2,4,3],[2,2,8],[2,1,1],[2,4,2],[2,3,3],[2,12,1],[2,3,2],[2,4,1],[2,2,4],[2,7,2],[2,1,1],[2,73,14],[2,90,1],[2,4,1],[2,2,1],[2,1,1],[2,6,3],[2,1,1],[2,4,1],[2,10,3],[2,2,3],[2,1,1],[2,6,1],[2,37,2],[2,10,1],[2,2,2],[2,60,2],[2,16,3],[2,6,1],[2,1,1],[2,3,4],[2,38,5],[2,6,2],[2,2,1],[2,2,1],[2,9,2],[2,11,1],[2,6,1],[2,9,1],[2,2,2],[2,4,3],[2,8,1],[2,3,2],[2,1,9],[2,14,2],[2,8,1],[2,30,4],[2,2,1],[2,31,2],[2,31,1],[2,21,23],[2,1,5],[2,4,1],[2,2,1],[2,5,3],[2,4,2],[2,10,2],[2,2,2],[2,18,1],[2,15,1],[2,2,1],[2,1,2],[2,5,1],[2,13,1],[2,14,4],[2,1,4],[2,5,1],[2,109,3],[2,18,2],[2,1,2],[2,164,114],[2,8,1],[2,2,3],[2,4,1],[2,1,1],[2,10,1],[2,9,2],[2,4,3],[2,1,75],[2,6,1],[2,17,2],[2,3,1],[2,9,1],[2,2,1],[2,21,1],[2,30,3],[2,7,2],[2,2,2],[2,63,5],[2,16,3],[2,6,1],[2,2,8],[2,25,2],[2,31,3],[2,126,21],[2,10,1],[2,2,2],[2,14,7],[2,6,10],[2,4,3],[2,7,1],[2,12,1],[2,2,1],[2,3,2],[2,2,15],[2,1,4],[2,4,1],[2,3,1],[2,4,1],[2,6,2],[2,7,3],[2,2,3],[2,9,2],[2,6,1],[2,2,1],[2,16,1],[2,22,2],[2,10,1],[2,10,4],[2,7,2],[2,13,1],[2,3,1],[2,7,2],[2,23,12],[2,3,1],[2,6,1],[2,4,2],[2,29,2],[2,5,3],[2,8,1],[2,1,1],[2,6,1],[2,3,1],[2,17,2],[2,15,1],[2,2,1],[2,6,1],[2,2,2],[2,30,1],[2,3,1],[2,2,2],[2,2,5],[2,2,1],[2,37,5],[2,6,2],[2,7,6],[2,2,3],[2,3,3],[2,2,5],[2,75,6],[2,2,3],[2,10,1],[2,2,3],[2,7,2],[2,30,1],[2,12,33],[2,1,1],[2,3,4],[2,14,1],[2,9,2],[2,8,1],[2,1,1],[2,9,1],[2,4,1],[2,2,1],[2,7,1],[2,4,1],[2,3,1],[2,4,3],[2,1,1],[2,5,2],[2,3,4],[2,4,2],[2,6,3],[2,13,5],[2,4,2],[2,6,1],[2,2,5],[2,2,3],[2,1,1],[2,14,1],[2,5,1],[2,4,2],[2,9,1],[2,7,6],[2,4,1],[2,19,2],[2,23,1],[2,20,7],[2,9,1],[2,4,1],[2,12,2],[2,9,4],[2,3,2],[2,3,7],[2,3,1],[2,10,2],[2,6,1],[2,7,1],[2,1,1],[2,9,1],[2,6,1],[2,1,1],[2,17,2],[2,9,1],[2,5,2],[2,1,1],[2,11,2],[2,9,1],[2,1,1],[2,3,6],[2,2,1],[2,5,9],[2,12,2],[2,2,1],[2,6,2],[2,17,4],[2,2,2],[2,7,1],[2,596,5],[2,6,1],[2,2,1],[2,58,125],[2,6,1],[2,8,1],[2,2,1],[2,3,1],[2,1,2],[2,11,4],[2,1,1],[2,9,6],[2,2,8],[2,1,1],[2,6,2],[2,1,1],[2,2,1],[2,7,2],[2,7,3],[2,14,2],[2,1,1],[2,18,9],[2,2,5],[2,2,12],[2,8,4],[2,6,
4],[2,3,1],[2,19,2],[2,4,1],[2,2,1],[2,4,3],[2,3,1],[2,13,1],[2,1,1],[2,7,1],[2,1,1],[2,8,1],[2,13,14],[2,11,1],[2,31,1],[2,4,1],[2,6,1],[2,3,2],[2,26,1],[2,4,2],[2,1,1],[2,2,2],[2,1,2],[2,1,1],[2,7,1],[2,8,1],[2,6,2],[2,19,13],[2,2,3],[2,8,3],[2,1,6],[2,5,1],[2,1,1],[2,6,1],[2,9,1],[2,2,2],[2,35,1],[2,1,1],[2,27,2],[2,54,2],[2,6,2],[2,5,1],[2,2,1],[2,2,4],[2,2,1],[2,2,1],[2,14,1],[2,9,1],[2,53,17],[2,2,1],[2,10,1],[2,9,1],[2,23,1],[2,7,1],[2,12,4],[2,1,2],[2,8,1],[2,7,4],[2,2,1],[2,2,1],[2,3,1],[2,11,1],[2,2,2],[2,6,1],[2,2,1],[2,18,4],[2,3,4],[2,8,2],[2,13,1],[2,2,1],[2,1,2],[2,14,4],[2,8,11],[2,1,1],[2,8,3],[2,7,3],[2,90,1],[2,20,2],[2,16,1],[2,20,2],[2,3,1],[2,8,10],[2,10,1],[2,10,1],[2,1,1],[2,3,1],[2,5,1],[2,37,3],[2,24,3],[2,10,1],[2,3,1],[2,2,4],[2,4,1],[2,19,2],[2,1,1],[2,5,1],[2,8,1],[2,3,1],[2,1,1],[2,2,1],[2,2,32],[2,2,1],[2,4,1],[2,1,1],[2,2,2],[2,5,1],[2,2,3],[2,25,9],[2,2,1],[2,4,4],[2,2,1],[2,15,1],[2,59,1],[2,3,2],[2,4,1],[2,9,2],[2,3,10],[2,6,1],[2,5,5],[2,8,2],[2,2,2],[2,4,2],[2,10,1],[2,126,1],[2,3,1],[2,8,1],[2,9,2],[2,1,30],[2,25,1],[2,7,3],[2,2,2],[2,1,3],[2,21,1],[2,38,1],[2,48,1],[2,22,1],[2,4,2],[2,55,2],[2,5,1],[2,15,1],[2,14,44],[2,4,1],[2,1,2],[2,2,3],[2,2,1],[2,3,3],[2,6,1],[2,2,1],[2,26,7],[2,4,1],[2,1,2],[2,3,2],[2,6,2],[2,10,1],[2,18,3],[2,2,1],[2,38,2],[2,1,1],[2,8,1],[2,8,1],[2,3,1],[2,4,1],[2,1,1],[2,1,2],[2,4,1],[2,26,2],[2,3,3],[2,2,1],[2,6,1],[2,19,1],[2,3,4],[2,2,1],[2,4,1],[2,11,1],[2,9,1],[2,9,1],[2,9,1],[2,1,1],[2,1,1],[2,7,1],[2,2,1],[2,11,4],[2,10,2],[2,4,1],[2,6,1],[2,4,1],[2,8,1],[2,11,1],[2,1,1],[2,7,1],[2,8,2],[2,9,1],[2,8,1],[2,41,2],[2,2,4],[2,1,6],[2,2,1],[2,6,3],[2,128,5],[2,2,1],[2,13,13],[2,6,1],[2,1,3],[2,3,3],[2,7,2],[2,10,12],[2,2,1],[2,8,1],[2,1,1],[2,7,1],[2,2,1],[2,10,2],[2,11,10],[2,1,1],[2,8,3],[2,4,5],[2,2,1],[2,14,2],[2,4,1],[2,4,1],[2,7,1],[2,6,1],[2,7,3],[2,1,1],[2,2,1],[2,7,2],[2,2,1],[2,6,1],[2,8,1],[2,2,4],[2,6,1],[2,43,1],[2,108,3],[2,8,1],[2,13,1],[2,4,1],[2,10,3],[2,2,1],[2,24,2],[2,1,2],[2,4,2],[2,2,2],[2,40,6],[2,6,2],[2,6,2],[2,4,3],[2,28,5],[2,4,1],[2,15,1],[2,12,1],[2,1,1],[2,27,1],[3,1,1],[3,5,2],[3,16,2],[3,16,3],[3,1,2],[3,98,2],[3,91,7],[3,6,37],[3,4,1],[3,9,1],[3,97,2],[3,6,1],[3,23,3],[3,115,1],[3,2,1],[3,1,1],[3,1,1],[3,14,4],[3,1,1],[3,28,1],[3,1,1],[3,6,1],[3,15,5],[3,3,1],[3,52,1],[3,2,3],[3,3,1],[3,4,5],[3,13,1],[3,16,3],[3,13,1],[3,17,1],[3,4,4],[3,6,7],[3,14,1],[3,32,1],[3,3,3],[3,11,4],[3,1,1],[3,8,6],[3,9,7],[3,2,1],[3,9,2],[3,5,2],[3,26,12],[3,11,3],[3,12,2],[3,4,2],[3,6,2],[3,30,6],[3,1,2],[3,10,1],[3,1,1],[3,4,1],[3,7,1],[3,30,29],[3,2,3],[3,2,2],[3,2,1],[3,11,1],[3,2,3],[3,3,1],[3,9,1],[3,2,2],[3,5,1],[3,1,2],[3,1,13],[3,6,9],[3,1,1],[3,6,2],[3,1,3],[3,4,1],[3,6,1],[3,9,3],[3,1,1],[3,9,2],[3,19,45],[3,2,1],[3,7,8],[3,21,3],[3,6,2],[3,2,1],[3,6,1],[3,5,1],[3,2,1],[3,15,7],[3,2,1],[3,9,3],[3,11,1],[3,4,1],[3,7,1],[3,2,1],[3,19,1],[3,5,1],[3,2,1],[3,1,1],[3,22,3],[3,21,5],[3,13,1],[3,2,1],[3,4,1],[3,23,1],[3,8,1],[3,3,2],[3,2,2],[3,4,1],[3,12,2],[3,5,2],[3,16,8],[3,6,1],[3,1,2],[3,2,1],[3,7,1],[3,6,1],[3,6,3],[3,45,1],[3,4,5],[3,1,2],[3,3,1],[3,2,1],[3,1,1],[3,12,1],[3,8,1],[3,3,1],[3,6,1],[3,2,2],[3,9,2],[3,5,2],[3,2,1],[3,3,1],[3,15,1],[3,11,1],[3,4,1],[3,9,2],[3,3,1],[3,4,1],[3,1,3],[3,6,15],[3,6,3],[3,2,6],[3,1,3],[3,3,2],[3,15,1],[3,6,1],[3,7,1],[3,5,1],[3,9,1],[3,49,2],[3,5,2],[3,9,4],[3,39,1],[3,4,3],[3,1,5],[3,1,2],[3,2,1],[3,14,2],[3,4,3],[3,18,1],[3,5,4],[3,19,3],[3,3,1],[3,2,1],[3,3,2],[3,48,10],[3,1,1],[3,5,6],[3,12,3],[3,1,2],[3,5,4],[3,4,1],[3,4,1],[3,5,1],[3,1,1],[3,10,1],[3,10,2],[3
,6,3],[3,2,7],[3,4,1],[3,9,2],[3,1,1],[3,2,1],[3,4,6],[3,1,1],[3,25,9],[3,11,1],[3,2,1],[3,8,2],[3,1,1],[3,9,3],[3,4,6],[3,1,7],[3,1,1],[3,4,1],[3,11,2],[3,14,1],[3,65,2],[3,6,1],[3,5,2],[3,2,2],[3,13,1],[3,2,5],[3,2,1],[3,4,2],[3,25,1],[3,2,1],[3,2,3],[3,9,1],[3,5,5],[3,46,1],[3,6,2],[3,12,9],[3,4,4],[3,2,3],[3,13,5],[3,39,16],[3,3,1],[3,1,2],[3,68,14],[3,5,1],[3,11,1],[3,7,1],[3,4,1],[3,53,11],[3,4,3],[3,4,1],[3,2,1],[3,4,1],[3,1,1],[3,1,2],[3,8,4],[3,5,1],[3,6,5],[3,6,13],[3,403,3],[3,23,1],[3,3,3],[3,14,1],[3,10,1],[3,3,2],[3,46,11],[3,4,3],[3,29,1],[3,41,2],[3,11,1],[3,15,3],[3,11,2],[3,6,1],[3,3,1],[3,17,2],[3,14,3],[3,5,4],[3,2,1],[3,2,1],[3,5,6],[3,6,1],[3,54,2],[3,2,1],[3,4,2],[3,1,1],[3,7,1],[3,8,34],[3,7,1],[3,1,2],[3,3,2],[3,2,5],[3,1,1],[3,15,12],[3,13,1],[3,5,1],[3,1,1],[3,5,1],[3,39,1],[3,26,9],[3,11,1],[3,6,1],[3,2,1],[3,19,4],[3,4,5],[3,10,1],[3,11,6],[3,4,1],[3,38,1],[3,1,1],[3,1,3],[3,2,1],[3,5,10],[3,4,1],[3,18,2],[3,4,1],[3,19,1],[3,1,1],[3,8,6],[3,1,1],[3,9,1],[3,8,3],[3,15,4],[3,9,3],[3,13,1],[3,10,1],[3,1,2],[3,5,4],[3,4,2],[3,4,1],[3,28,1],[3,6,2],[3,9,1],[3,1,2],[3,2,2],[3,25,1],[3,5,8],[3,5,3],[3,8,2],[3,2,1],[3,14,5],[3,2,1],[3,11,3],[3,10,1],[3,2,2],[3,1,1],[3,3,1],[3,9,1],[3,39,9],[3,27,2],[3,1,1],[3,1,3],[3,12,3],[3,6,1],[3,14,2],[3,17,3],[3,198,1],[3,3,1],[3,5,1],[3,1,1],[3,2,4],[3,12,1],[3,31,1],[3,8,14],[3,25,2],[3,16,2],[3,18,2],[3,2,3],[3,2,3],[3,6,28],[3,22,3],[3,6,1],[3,8,2],[3,4,3],[3,3,3],[3,8,1],[3,1,1],[3,1,2],[3,1,1],[3,1,1],[3,1,2],[3,6,2],[3,2,3],[3,4,1],[3,3,1],[3,1,1],[3,3,2],[3,8,10],[3,6,1],[3,2,1],[3,2,1],[3,5,1],[3,29,6],[3,10,1],[3,3,8],[3,1,3],[3,2,2],[3,3,1],[3,3,4],[3,5,19],[3,15,1],[3,65,1],[3,2,2],[3,60,3],[3,52,1],[3,1,1],[3,4,2],[3,4,1],[3,6,1],[3,7,4],[3,1,1],[3,13,1],[3,8,3],[3,13,1],[3,6,1],[3,3,2],[3,14,1],[3,2,2],[3,4,1],[3,1,1],[3,11,29],[3,7,1],[3,21,6],[3,4,1],[3,1,1],[3,2,1],[3,9,1],[3,2,4],[3,3,1],[3,2,3],[3,1,2],[3,3,2],[3,3,4],[3,16,2],[3,9,2],[3,2,1],[3,17,8],[3,9,4],[3,7,1],[3,6,4],[3,1,2],[3,2,1],[3,4,4],[3,2,1],[3,3,1],[3,3,1],[3,11,1],[3,2,2],[3,2,1],[3,2,3],[3,2,2],[3,10,6],[3,10,4],[3,1,1],[3,8,3],[3,29,2],[3,7,1],[3,2,1],[3,4,1],[3,11,1],[3,2,1],[3,2,2],[3,13,3],[3,4,1],[3,3,1],[3,2,4],[3,18,1],[3,12,1],[3,6,3],[3,3,1],[3,5,1],[3,3,2],[3,9,2],[3,5,1],[3,5,1],[3,11,1],[3,1,1],[3,39,18],[3,3,2],[3,4,1],[3,17,2],[3,14,2],[3,10,6],[3,1,1],[3,4,5],[3,2,1],[3,4,6],[3,12,1],[3,106,80],[3,32,1],[3,7,1],[3,8,1],[3,2,1],[3,33,2],[3,33,7],[3,10,1],[3,3,2],[3,4,3],[3,16,3],[3,7,1],[3,8,1],[3,16,1],[3,8,1],[3,8,1],[3,30,1],[3,7,1],[3,2,1],[3,3,10],[3,27,1],[3,2,1],[3,1,3],[3,2,1],[3,23,1],[3,1,1],[3,5,2],[3,6,1],[3,2,1],[3,2,13],[3,1,3],[3,6,2],[3,5,1],[3,26,1],[3,4,5],[3,2,1],[3,9,1],[3,6,1],[3,2,1],[3,21,2],[3,15,1],[3,4,2],[3,2,1],[3,30,1],[3,4,2],[3,2,1],[3,2,58],[3,8,2],[3,13,1],[3,16,2],[3,10,6],[3,6,1],[3,6,1],[3,2,6],[3,1,1],[3,2,4],[3,11,9],[3,25,2],[3,4,2],[3,1,1],[3,9,9],[3,1,9],[3,3,3],[3,4,1],[3,2,3],[3,5,2],[3,2,7],[3,2,1],[3,2,1],[3,6,3],[3,3,4],[3,1,2],[3,4,3],[3,7,118],[3,7,1],[3,6,1],[3,3,1],[3,1,15],[3,1,2],[3,4,2],[3,2,1],[3,4,1],[3,6,1],[3,23,1],[3,1,1],[3,3,1],[3,4,1],[3,10,3],[3,2,2],[3,6,5],[3,8,1],[3,3,1],[3,4,1],[3,20,2],[3,14,2],[3,7,1],[3,21,29],[3,10,2],[3,10,2],[3,3,3],[3,2,1],[3,3,2],[3,24,3],[3,3,1],[3,9,1],[3,6,1],[3,22,1],[3,13,1],[3,5,2],[3,1,1],[3,9,1],[3,10,2],[3,4,1],[3,7,1],[3,2,1],[3,12,4],[3,48,2],[3,43,1],[3,6,1],[3,1,1],[3,4,1],[3,14,10],[3,2,1],[3,1,1],[3,1,1],[3,3,1],[3,11,5],[3,36,1],[3,4,49],[3,11,1],[3,8,1],[3,2,2],[3,3,1],[3,3,1],[3,8,3],[3,15,8],[3,30,9],[3,23,5],[3,10,1],[3,7
,6],[3,1,1],[3,9,2],[3,6,1],[3,3,1],[3,3,1],[3,2,1],[3,21,1],[3,13,2],[3,4,2],[3,9,2],[3,8,1],[3,2,2],[3,4,2],[3,1,1],[3,9,2],[3,32,2],[3,2,2],[3,10,1],[3,1,4],[3,4,3],[3,14,3],[3,5,2],[3,2,1],[3,3,1],[3,5,3],[3,14,3],[3,2,3],[3,6,1],[3,4,1],[3,1,1],[3,16,1],[3,3,1],[3,2,1],[3,5,1],[3,33,1],[3,3,1],[3,14,4],[3,8,3],[3,12,2],[3,14,1],[3,2,1],[3,1,1],[3,13,2],[3,8,1],[3,9,1],[3,17,1],[3,14,2],[3,16,1],[3,12,4],[3,2,1],[3,2,2],[3,20,1],[3,2,2],[3,8,4],[3,7,3],[3,8,1],[3,1,2],[3,5,5],[3,29,1],[3,1,1],[3,2,1],[3,8,2],[3,2,1],[3,7,9],[3,3,2],[3,7,1],[3,6,1],[3,6,2],[3,1,26],[3,3,3],[3,7,1],[3,2,2],[3,8,2],[3,7,1],[3,3,1],[3,4,4],[3,11,1],[3,5,15],[3,28,1],[3,3,8],[3,3,3],[3,2,4],[3,6,4],[3,3,2],[3,2,2],[3,5,1],[3,12,2],[3,10,2],[3,1,1],[3,6,1],[3,2,1],[3,3,2],[4,8,1],[4,3,1],[4,23,1],[4,4,9],[4,6,2],[4,9,1],[4,9,6],[4,5,9],[4,8,1],[4,2,1],[4,2,3],[4,8,1],[4,1,1],[4,4,1],[4,8,1],[4,2,1],[4,16,1],[4,1,8],[4,4,1],[4,1,3],[4,18,1],[4,2,1],[4,4,9],[4,2,1],[4,3,1],[4,9,2],[4,2,1],[4,7,3],[4,5,4],[4,27,2],[4,1,1],[4,8,2],[4,7,1],[4,8,1],[4,9,4],[4,3,2],[4,6,4],[4,2,2],[4,13,5],[4,8,1],[4,10,2],[4,1,1],[4,2,1],[4,1,2],[4,6,2],[4,5,2],[4,8,2],[4,16,2],[4,7,2],[4,102,5],[4,2,2],[4,1,1],[4,2,1],[4,1,2],[4,2,1],[4,29,4],[4,2,1],[4,1,1],[4,1,4],[4,3,2],[4,6,1],[4,19,2],[4,4,3],[4,1,12],[4,1,1],[4,62,3],[4,14,1],[4,1,1],[4,1,1],[4,7,4],[4,9,1],[4,15,1],[4,16,15],[4,2,2],[4,2,1],[4,41,3],[4,7,8],[4,7,3],[4,5,1],[4,9,1],[4,6,1],[4,1,3],[4,15,1],[4,5,4],[4,28,2],[4,11,3],[4,15,1],[4,1,1],[4,1,1],[4,12,1],[4,16,4],[4,12,5],[4,5,2],[4,8,4],[4,124,115],[4,11,3],[4,46,10],[4,4,1],[4,3,1],[4,2,1],[4,27,1],[4,1,1],[4,20,1],[4,2,1],[4,4,1],[4,53,1],[4,18,1],[4,1,1],[4,8,2],[4,3,1],[4,2,1],[4,5,1],[4,2,3],[4,2,5],[4,3,1],[4,8,1],[4,2,5],[4,8,2],[4,9,2],[4,48,1],[4,9,1],[4,20,2],[4,4,4],[4,3,2],[4,8,2],[4,6,2],[4,12,6],[4,9,1],[4,3,1],[4,4,1],[4,5,3],[4,5,1],[4,8,4],[4,3,1],[4,7,1],[4,6,2],[4,15,16],[4,6,1],[4,50,4],[4,23,4],[4,9,7],[4,8,2],[4,1,1],[4,2,1],[4,9,1],[4,12,1],[4,4,3],[4,2,2],[4,42,4],[4,1,1],[4,6,1],[4,11,10],[4,6,11],[4,7,1],[4,4,2],[4,4,2],[4,6,1],[4,59,4],[4,1,1],[4,2,7],[4,12,20],[4,11,3],[4,4,1],[4,12,3],[4,6,3],[4,7,2],[4,17,4],[4,106,8],[4,6,2],[4,7,1],[4,1,1],[4,8,1],[4,4,6],[4,3,1],[4,4,3],[4,14,3],[4,15,2],[4,4,1],[4,44,91],[4,7,2],[4,3,2],[4,2,1],[4,23,2],[4,30,1],[4,2,2],[4,10,1],[4,6,9],[4,6,2],[4,3,2],[4,3,2],[4,20,1],[4,4,1],[4,18,2],[4,12,1],[4,20,14],[4,10,1],[4,3,1],[4,2,1],[4,3,2],[4,3,3],[4,6,3],[4,2,4],[4,8,1],[4,8,5],[4,3,1],[4,10,2],[4,2,1],[4,1,1],[4,10,1],[4,25,2],[4,1,1],[4,4,1],[4,63,2],[4,1,1],[4,4,1],[4,6,7],[4,2,3],[4,8,1],[4,19,2],[4,11,1],[4,30,10],[4,4,4],[4,2,3],[4,2,1],[4,43,29],[4,2,1],[4,1,1],[4,17,1],[4,14,1],[4,13,1],[4,6,4],[4,2,2],[4,1,2],[4,3,1],[4,7,3],[4,4,1],[4,4,1],[4,1,1],[4,13,5],[4,2,1],[4,1,1],[4,5,1],[4,4,2],[4,13,2],[4,10,4],[4,8,1],[4,3,1],[4,2,2],[4,8,3],[4,4,2],[4,6,1],[4,7,1],[4,14,29],[4,19,1],[4,7,1],[4,19,1],[4,24,2],[4,2,1],[4,1,1],[4,28,1],[4,1,1],[4,2,1],[4,3,1],[4,2,1],[4,1,7],[4,2,4],[4,3,1],[4,29,1],[4,2,1],[4,14,1],[4,2,1],[4,28,3],[4,11,3],[4,1,2],[4,21,2],[4,1,1],[4,15,1],[4,17,1],[4,16,1],[4,13,1],[4,2,1],[4,15,5],[4,19,1],[4,17,1],[4,5,3],[4,12,2],[4,33,1],[4,8,1],[4,15,4],[4,2,11],[4,4,1],[4,1,10],[4,39,1],[4,28,1],[4,25,2],[4,1,1],[4,14,2],[4,8,32],[4,9,1],[4,7,1],[4,6,2],[4,1,2],[4,3,1],[4,6,2],[4,12,2],[4,2,2],[4,5,2],[4,18,1],[4,5,3],[4,6,2],[4,25,1],[4,3,16],[4,14,4],[4,2,6],[4,14,2],[4,3,1],[4,4,1],[4,9,3],[4,28,2],[4,9,1],[4,2,1],[4,7,1],[4,2,1],[4,1,4],[4,4,3],[4,1,1],[4,16,6],[4,3,1],[4,10,1],[4,12,3],[4,8,1],[4,4,1],[4,15,2],[4,
4,1],[4,2,3],[4,2,9],[4,4,1],[4,7,2],[4,14,1],[4,31,3],[4,13,1],[4,19,2],[4,8,3],[4,2,1],[4,12,1],[4,5,1],[4,45,3],[4,6,1],[4,1,1],[4,12,6],[4,4,3],[4,3,1],[4,5,2],[4,4,4],[4,19,2],[4,8,1],[4,2,1],[4,27,2],[4,73,3],[4,22,2],[4,1,2],[4,7,46],[4,9,2],[4,2,1],[4,524,305],[4,7,1],[4,26,1],[4,2,1],[4,6,1],[4,30,2],[4,6,1],[4,25,92],[4,2,1],[4,13,1],[4,1,4],[4,1,7],[4,6,1],[4,8,2],[4,6,1],[4,4,2],[4,2,6],[4,12,2],[4,2,2],[4,5,2],[4,3,2],[4,13,1],[4,4,1],[4,6,3],[4,14,1],[4,15,1],[4,25,1],[4,3,1],[4,9,4],[4,94,3],[4,11,2],[4,12,4],[4,7,3],[4,3,1],[4,9,2],[4,3,1],[4,2,1],[4,8,3],[4,7,5],[4,2,45],[4,10,1],[4,10,4],[4,5,3],[4,6,6],[5,5,1],[5,2,1],[5,3,3],[5,11,2],[5,28,1],[5,8,1],[5,4,1],[5,4,1],[5,12,1],[5,7,1],[5,1,1],[5,38,7],[5,6,2],[5,4,2],[5,5,1],[5,2,2],[5,2,7],[5,1,4],[5,4,1],[5,4,1],[5,1,2],[5,3,1],[5,7,1],[5,2,1],[5,10,2],[5,4,1],[5,2,1],[5,2,2],[5,3,1],[5,15,78],[5,2,1],[5,1,5],[5,10,1],[5,6,4],[5,10,2],[5,5,1],[5,1,1],[5,1,1],[5,2,2],[5,6,1],[5,2,2],[5,6,2],[5,10,2],[5,3,1],[5,6,2],[5,4,3],[5,16,5],[5,47,48],[5,2,5],[5,6,7],[5,4,2],[5,3,1],[5,2,1],[5,8,1],[5,7,1],[5,2,2],[5,2,1],[5,3,1],[5,7,4],[5,1,1],[5,1,1],[5,8,6],[5,1,4],[5,9,3],[5,11,4],[5,6,1],[5,6,1],[5,2,1],[5,5,1],[5,84,1],[5,2,33],[5,8,1],[5,6,3],[5,5,3],[5,2,1],[5,10,2],[5,3,1],[5,68,9],[5,6,2],[5,21,11],[5,3,4],[5,3,1],[5,16,3],[5,2,2],[5,2,1],[5,14,2],[5,24,2],[5,19,1],[5,1,4],[5,1,1],[5,3,1],[5,6,1],[5,2,1],[5,5,2],[5,4,3],[5,26,3],[5,2,1],[5,6,4],[5,2,1],[5,6,3],[5,5,1],[5,8,3],[5,1,3],[5,9,1],[5,1,2],[5,11,2],[5,23,1],[5,7,1],[5,2,2],[5,3,2],[5,2,1],[5,11,2],[5,8,2],[5,1,1],[5,4,1],[5,2,1],[5,7,1],[5,11,1],[5,1,1],[5,33,1],[5,4,1],[5,5,1],[5,17,3],[5,1,2],[5,18,2],[5,1,2],[5,1,1],[5,2,3],[5,4,2],[5,2,1],[5,13,7],[5,5,1],[5,19,4],[5,23,9],[5,11,6],[5,7,2],[5,10,1],[5,2,1],[5,26,1],[5,3,3],[5,3,2],[5,3,2],[5,15,3],[5,2,1],[5,3,1],[5,4,1],[5,8,1],[5,4,1],[5,23,1],[5,6,1],[5,1,3],[5,124,17],[5,1,1],[5,1,1],[5,15,1],[5,11,2],[5,2,1],[5,2,2],[5,3,2],[5,1,1],[5,6,4],[5,6,1],[5,3,3],[5,6,5],[5,17,1],[5,7,2],[5,5,1],[5,11,1],[5,3,2],[5,36,2],[5,17,7],[5,4,1],[5,7,2],[5,2,1],[5,2,1],[5,2,1],[5,7,10],[5,4,1],[5,1,3],[5,19,2],[5,2,2],[5,3,1],[5,8,3],[5,4,1],[5,15,1],[5,2,3],[5,13,2],[5,1,3],[5,7,1],[5,23,48],[5,9,1],[5,12,10],[5,16,1],[5,10,1],[5,7,5],[5,2,1],[5,3,1],[5,23,2],[5,4,1],[5,18,1],[5,13,2],[5,54,136],[5,6,2],[5,2,2],[5,5,1],[5,6,1],[5,15,8],[5,14,9],[5,4,1],[5,7,2],[5,3,3],[5,117,5],[5,25,8],[5,14,4],[5,25,3],[5,7,1],[5,7,1],[5,15,3],[5,3,2],[5,4,1],[5,6,4],[5,14,4],[5,7,1],[5,20,1],[5,6,5],[5,12,1],[5,9,3],[5,2,1],[5,4,20],[5,4,3],[5,1,1],[5,1,1],[5,8,1],[5,4,1],[5,1,1],[5,6,3],[5,19,1],[5,14,1],[5,22,2],[5,2,1],[5,11,2],[5,1,1],[5,10,1],[5,4,1],[5,23,3],[5,3,1],[5,15,1],[5,8,4],[5,11,4],[5,4,1],[5,2,1],[5,8,6],[5,2,4],[5,2,7],[5,3,2],[5,2,1],[5,1,1],[5,1,1],[5,11,2],[5,4,10],[5,11,4],[5,110,4],[5,6,1],[5,2,1],[5,96,34],[6,4,1],[6,7,3],[6,2,1],[6,6,2],[6,10,1],[6,2,1],[6,10,1],[6,59,2],[6,7,4],[6,4,2],[6,3,1],[6,6,1],[6,1,4],[6,7,3],[6,2,3],[6,1,1],[6,12,1],[6,1,39],[6,28,1],[6,3,4],[6,8,3],[6,4,4],[6,9,2],[6,15,1],[6,10,1],[6,1,1],[6,2,1],[6,7,1],[6,2,1],[6,93,1],[6,14,6],[6,2,2],[6,55,39],[6,15,2],[6,23,3],[6,3,3],[6,35,2],[6,5,15],[6,1,7],[6,8,19],[6,10,10],[6,3,2],[6,6,3],[6,1,2],[6,6,1],[6,2,1],[6,4,1],[6,127,20],[6,20,18],[6,3,1],[6,9,2],[6,2,3],[6,10,1],[6,27,1],[6,9,1],[6,9,1],[6,28,1],[6,1,1],[6,10,1],[6,11,1],[6,5,1],[6,4,1],[6,82,35],[6,2,1],[6,1,1],[6,3,1],[6,2,1],[6,2,11],[6,2,8],[6,3,2],[6,12,3],[6,5,6],[6,42,4],[6,8,1],[6,2,1],[6,2,2],[6,10,3],[6,6,2],[6,48,2],[6,2,3],[6,2,2],[6,2,1],[6,4,1],[6,10,1],[6,1,
1],[6,7,1],[6,35,1],[6,17,1],[6,21,2],[6,1,1],[6,4,2],[6,25,1],[6,7,2],[6,12,4],[6,2,6],[6,24,4],[6,2,1],[6,5,1],[6,2,1],[6,2,1],[6,3,2],[6,4,2],[6,2,1],[6,2,1],[6,2,9],[6,2,2],[6,5,1],[6,8,10],[6,1,1],[6,12,2],[6,10,1],[6,4,2],[6,12,4],[6,1,3],[6,3,2],[6,8,1],[6,4,4],[6,12,5],[6,4,2],[6,10,1],[6,1,1],[6,12,1],[6,6,4],[6,2,1],[6,3,2],[6,1,1],[6,3,5],[6,6,1],[6,32,1],[6,10,1],[6,6,5],[6,27,2],[6,7,1],[6,2,1],[6,10,2],[6,5,1],[6,8,2],[6,3,2],[6,9,2],[6,22,1],[6,2,2],[6,10,1],[6,3,4],[6,1,1],[6,3,6],[6,8,2],[6,44,1],[6,1,1],[6,9,7],[6,9,5],[6,19,4],[6,7,1],[6,1,1],[6,10,1],[6,14,2],[6,4,3],[6,4,1],[6,6,1],[6,3,1],[6,4,1],[6,6,3],[6,6,2],[6,6,1],[6,1,3],[6,12,13],[6,3,2],[6,1,4],[6,15,1],[6,39,4],[6,5,1],[6,1,5],[6,11,3],[6,5,7],[6,9,2],[6,1,1],[6,12,1],[6,12,1],[6,1,4],[6,11,1],[6,3,1],[6,6,2],[6,5,2],[6,2,1],[6,1,2],[6,2,1],[6,41,23],[6,3,1],[6,15,1],[6,1,1],[6,1,1],[6,2,2],[6,3,1],[6,10,1],[6,17,6],[6,5,2],[6,30,1],[7,2,2],[7,10,2],[7,8,3],[7,9,4],[7,4,1],[7,8,1],[7,2,1],[7,7,134],[7,16,1],[7,5,3],[7,3,1],[7,6,2],[7,1,1],[7,5,1],[7,5,1],[7,2,1],[7,24,1],[7,8,4],[7,9,2],[7,1,1],[7,6,2],[7,9,2],[7,1,1],[7,5,28],[7,1,1],[7,2,2],[7,7,2],[7,11,1],[7,2,1],[7,17,32],[7,5,1],[7,2,1],[7,3,2],[7,7,4],[7,15,3],[7,3,1],[7,6,2],[7,1,1],[7,2,1],[7,1,1],[7,1,11],[7,2,1],[7,8,1],[7,6,1],[7,2,1],[7,57,1],[7,20,46],[7,6,2],[7,6,1],[7,1,2],[7,28,7],[7,3,5],[7,4,1],[7,4,6],[7,2,2],[7,3,3],[7,2,3],[7,2,1],[7,1,1],[7,2,6],[7,4,1],[7,3,1],[7,23,1],[7,7,2],[7,7,1],[7,4,3],[7,2,1],[7,1,1],[7,4,2],[7,15,2],[7,6,1],[7,2,1],[7,14,1],[7,1,1],[7,1,1],[7,4,2],[7,2,1],[7,4,1],[7,2,1],[7,4,3],[7,22,1],[7,10,1],[7,2,1],[7,1,2],[7,7,2],[7,1,2],[7,12,1],[7,3,1],[7,2,4],[7,3,8],[7,2,1],[7,6,1],[7,5,3],[7,8,2],[7,5,1],[7,6,1],[7,6,1],[7,5,1],[7,9,5],[7,3,1],[7,3,2],[7,3,19],[7,28,3],[7,2,2],[7,3,1],[7,51,4],[7,2,1],[7,2,1],[7,22,2],[7,5,1],[7,2,1],[7,4,2],[7,2,1],[7,6,2],[7,6,1],[7,3,1],[7,37,1],[7,9,1],[7,8,2],[7,2,1],[7,4,1],[7,2,1],[7,18,1],[7,9,2],[7,1,1],[7,5,1],[7,2,1],[7,13,1],[7,45,1],[7,1,3],[7,7,5],[7,16,1],[7,7,1],[7,1,1],[7,3,1],[7,8,1],[7,1,1],[7,1,4],[7,2,2],[7,6,1],[7,6,1],[7,2,1],[7,16,1],[7,11,1],[7,1,1],[7,2,1],[7,3,2],[7,8,8],[7,33,1],[7,2,8],[7,4,1],[7,6,7],[7,12,3],[7,17,1],[7,9,5],[7,3,2],[7,3,2],[7,4,1],[7,1,1],[7,2,2],[7,6,1],[8,9,1],[8,79,3],[8,3,1],[8,14,4],[8,2,4],[8,10,5],[8,7,3],[8,8,1],[8,6,1],[8,7,1],[8,8,2],[8,9,1],[8,30,2],[8,1,1],[8,1,5],[8,15,2],[8,10,3],[8,5,3],[8,1,2],[8,3,1],[8,16,1],[8,3,1],[8,3,3],[8,3,4],[8,2,1],[8,6,2],[8,4,4],[8,5,3],[8,8,4],[8,8,3],[8,4,3],[8,13,7],[8,2,1],[8,2,1],[8,1,1],[8,4,1],[8,10,3],[8,16,9],[8,3,2],[8,1,2],[8,2,5],[8,5,2],[8,156,14],[8,1,1],[8,5,1],[8,252,690],[8,5,1],[8,25,21],[8,1,1],[8,39,12],[8,1,4],[8,6,1],[8,25,7],[8,1,1],[8,7,1],[8,46,11],[8,3,1],[8,1,1],[8,14,1],[8,24,1],[8,16,3],[8,6,3],[8,5,1],[8,1,2],[8,12,2],[8,2,1],[8,2,5],[8,6,1],[8,6,1],[8,14,1],[8,7,1],[8,6,1],[8,4,6],[8,1,2],[8,3,1],[8,2,14],[8,7,12],[8,2,2],[8,25,15],[8,8,3],[8,6,6],[8,5,1],[8,1,1],[8,2,3],[8,18,3],[8,2,2],[8,3,1],[8,4,1],[8,3,3],[8,4,2],[8,12,2],[8,1,1],[8,4,1],[8,18,1],[8,2,2],[8,11,3],[8,5,1],[8,6,1],[8,13,1],[8,6,1],[8,23,1],[8,18,3],[8,13,2],[8,4,1],[8,38,4],[8,1,1],[8,6,1],[8,10,2],[8,2,7],[8,10,7],[8,1,1],[8,4,7],[8,2,1],[8,2,2],[8,7,1],[8,17,1],[8,10,5],[8,4,4],[8,8,4],[8,3,2],[8,2,1],[8,33,1],[8,8,6],[8,15,1],[8,2,1],[8,7,4],[8,6,3],[8,2,1],[8,1,2],[8,3,1],[8,4,1],[8,4,2],[8,27,1],[8,10,1],[9,8,2],[9,2,2],[9,7,1],[9,11,1],[9,35,5],[9,3,1],[9,2,2],[9,6,7],[9,16,2],[9,7,15],[9,3,1],[9,9,1],[9,5,1],[9,3,1],[9,3,1],[9,4,1],[9,2,5],[9,1,1],[9,5,4],[9,1,1],[9,13,1],[9,14,4],
[9,3,1],[9,35,3],[9,41,1],[9,8,3],[9,2,5],[9,8,2],[9,13,3],[9,10,1],[9,4,1],[9,35,12],[9,9,1],[9,12,1],[9,4,1],[9,2,4],[9,1,2],[9,6,4],[9,1,4],[9,20,3],[9,4,3],[9,3,3],[9,1,4],[9,2,11],[9,11,2],[9,19,1],[9,5,1],[9,6,2],[9,1,1],[9,3,1],[9,15,3],[9,2,1],[9,6,1],[9,13,1],[9,2,1],[9,11,2],[9,3,5],[9,6,1],[9,16,1],[9,4,1],[9,3,2],[9,3,1],[9,2,5],[9,13,1],[9,3,1],[9,2,2],[9,7,1],[9,2,3],[9,3,4],[9,5,1],[9,4,1],[9,10,2],[9,36,1],[9,7,2],[9,3,1],[9,4,2],[9,5,5],[9,12,1],[9,4,1],[9,2,2],[9,12,1],[9,13,1],[9,12,1],[9,2,4],[9,1,1],[9,1,2],[9,6,6],[9,1,2],[9,8,4],[9,7,2],[9,15,4],[10,3,25],[10,2,1],[10,4,2],[10,8,1],[10,2,1],[10,1,1],[10,21,1],[10,21,19],[10,4,4],[10,4,8],[10,2,1],[10,1,3],[10,3,5],[10,6,1],[10,8,5],[10,4,1],[10,24,5],[10,2,2],[10,24,1],[10,6,4],[10,1,2],[10,25,1],[10,14,1],[10,6,3],[10,2,3],[10,6,1],[10,15,2],[10,54,3],[10,12,1],[10,21,1],[10,7,1],[10,4,4],[10,5,1],[10,10,3],[10,37,1],[10,8,3],[10,11,1],[10,2,4],[10,6,1],[10,30,1],[10,35,1],[10,4,2],[10,2,1],[10,5,2],[10,6,1],[10,4,4],[10,12,1],[10,12,1],[10,44,4],[10,16,3],[10,1,64],[10,27,1],[10,9,3],[10,17,2],[10,25,2],[10,2,2],[10,7,3],[10,89,1],[10,7,30],[10,2,4],[10,2,3],[10,2,1],[10,3,3],[10,11,1],[10,7,1],[10,2,1],[10,4,2],[10,1,1],[10,1,1],[10,6,2],[10,7,3],[10,4,1],[10,2,2],[10,18,1],[10,4,1],[10,19,1],[10,14,6],[10,5,1],[10,5,6],[10,12,1],[11,5,6],[11,15,8],[11,9,1],[11,3,2],[11,6,3],[11,24,4],[11,27,3],[11,2,2],[11,5,9],[11,13,1],[11,3,1],[11,2,25],[11,10,1],[11,4,11],[11,7,2],[11,49,1],[11,4,1],[11,12,1],[11,7,1],[11,1,2],[11,10,6],[11,2,1],[11,4,2],[11,1,2],[11,2,1],[11,5,1],[11,4,3],[11,1,1],[11,6,1],[11,4,3],[11,95,2],[11,8,1],[11,18,1],[11,5,1],[11,16,12],[11,13,2],[11,7,6],[11,56,1],[11,6,1],[11,8,1],[11,21,14],[11,2,7],[11,5,1],[11,1,1],[11,5,2],[11,2,1],[11,15,1],[11,3,3],[11,26,1],[11,6,6],[11,1,1],[11,10,7],[11,6,3],[11,6,1],[11,8,2],[11,1,2],[11,35,2],[11,19,2],[11,8,2],[11,4,1],[11,7,2],[11,4,5],[11,3,5],[11,17,1],[11,3,3],[11,2,1],[11,12,1],[11,2,8],[11,85,1],[11,4,1],[11,9,1],[11,2,2],[11,2,1],[11,6,2],[11,6,3],[11,18,3],[11,1,1],[11,8,1],[11,22,1],[11,7,1],[11,4,2],[11,4,1],[11,8,3],[11,10,4],[11,24,1],[11,10,19],[11,12,8],[12,5,1],[12,1,7],[12,4,1],[12,21,6],[12,12,2],[12,16,1],[12,1,1],[12,2,1],[12,3,1],[12,8,9],[12,1,1],[12,17,2],[12,16,6],[12,14,1],[12,3,3],[12,27,3],[12,2,1],[12,3,3],[12,14,4],[12,1,3],[12,10,1],[12,5,7],[12,7,3],[12,13,5],[12,4,1],[12,47,4],[12,18,1],[12,31,2],[12,8,1],[12,5,4],[12,1,1],[12,26,1],[12,13,2],[12,5,2],[12,4,3],[12,15,5],[12,2,1],[12,2,1],[12,3,1],[12,5,1],[12,11,1],[12,4,3],[12,1,1],[12,7,2],[12,6,1],[12,14,6],[12,32,4],[12,14,1],[12,31,1],[12,7,3],[12,9,7],[12,5,1],[12,6,1],[12,6,6],[12,7,8],[12,2,1],[12,3,1],[12,4,3],[12,1,1],[12,19,2],[12,11,1],[12,7,2],[12,8,1],[12,15,4],[12,5,1],[12,9,3],[12,2,1],[12,1,1],[12,8,9],[12,3,6],[12,15,1],[13,1,11],[13,7,2],[13,10,1],[13,13,4],[13,3,2],[13,1,2],[13,2,1],[13,3,4],[13,3,1],[13,4,3],[13,5,1],[13,10,13],[13,5,4],[13,2,3],[13,3,2],[13,72,2],[13,7,3],[13,19,2],[13,4,1],[13,5,6],[13,4,2],[13,2,1],[13,2,1],[13,34,11],[13,5,2],[13,9,5],[13,6,2],[13,5,5],[13,9,5],[13,9,1],[13,19,3],[13,4,1],[13,3,1],[13,7,2],[13,1,1],[13,11,7],[13,4,7],[13,6,1],[13,2,1],[13,1,1],[13,21,1],[13,6,15],[13,5,2],[13,1,1],[13,1,2],[14,2,1],[14,18,1],[14,8,2],[14,5,1],[14,2,2],[14,5,2],[14,2,1],[14,8,2],[14,4,1],[14,8,5],[14,14,1],[14,9,6],[14,18,2],[14,4,1],[14,6,1],[14,18,1],[14,6,6],[14,4,1],[14,6,2],[14,6,8],[14,3,1],[14,2,3],[14,1,1],[14,17,4],[14,4,3],[14,15,3],[14,4,8],[14,15,2],[14,6,1],[14,9,22],[14,7,3],[14,7,6],[14,2,2],[14,1,1],[14,7,4],[14,10,
1],[14,1,1]])
        #data = np.array([[131,3,1],[49,1,1],[17,7,1],[55,7,19],[80,5,1],[40,2,2],[91,21,6],[19,16,1],[27,7,1],[15,50,2],[37,1,7],[17,3,1],[22,32,2],[68,2,1],[26,2,3],[15,2,3],[246,2,1],[25,2,1],[19,1,1],[98,1,2],[54,13,1],[168,2,4],[20,102,5],[40,2,1],[41,1,1],[44,19,16],[17,6,1],[92,12,1],[17,2,1],[16,5,3],[45,11,1],[20,10,1],[26,1,2],[21,9,9],[26,10,1],[187,4,2],[65,28,4],[17,9,33],[23,39,1],[58,4,4],[41,107,3],[28,3,1],[16,1,1],[17,16,4],[17,16,1],[17,5,1],[83,2,2],[17,1,2],[26,4,2],[22,7,2],[16,1,1],[15,2,1],[15,2,1],[111,8,1],[25,6,1],[112,4,1],[19,10,2],[38,25,4],[29,1,5],[17,2,1],[111,9,8],[53,5,4],[29,7,1],[25,8,2],[23,2,134],[32,6,1],[27,1,1],[61,4,2],[41,163,4],[57,11,2],[24,2,1],[16,18,1],[81,7,14],[169,5,1],[19,4,1],[412,5,1],[32,2,7],[19,28,3],[17,11,1],[44,4,5],[27,2,2],[18,1,7],[15,3,3],[18,10,1],[19,6,10],[46,2,5],[20,12,3],[25,6,4],[18,4,1],[15,40,8],[16,11,16],[237,1,1],[26,13,2],[26,4,1],[101,5,5],[50,2,1],[22,45,5],[16,7,2],[17,4,2],[19,2,3],[22,1,1],[260,6,1],[20,15,1],[24,5,1],[33,2,1],[16,1,5],[21,18,1],[22,1,1],[18,13,2],[124,3,1],[16,6,1],[19,6,2],[71,2,1],[232,2,2],[21,2,1],[231,11,1],[201,49,2],[28,12,1],[68,5,1],[56,26,7],[17,1,8],[19,10,2],[120,13,2],[218,3,1],[46,5,6],[57,4,1],[30,5,2],[17,8,4],[17,22,1],[15,5,1],[16,7,1],[26,13,1],[28,22,2],[100,1,2],[58,12,2],[52,9,11],[21,4,2],[18,4,1],[699,1,1],[401,6,3],[20,7,1],[20,3,13],[27,1,1],[35,2,2],[27,6,1],[15,13,1],[17,6,1],[26,28,4],[89,2,3],[36,11,2],[17,11,2],[15,1,1],[59,3,1],[15,3,1],[20,11,1],[49,1,1],[24,3,1],[25,7,1],[29,1,1],[61,2,2],[28,3,13],[82,2,8],[22,2,1],[21,25,3],[73,3,2],[22,8,1],[51,3,12],[16,6,1],[64,2,4],[22,2,2],[19,7,1],[69,2,1],[17,8,9],[19,1,13],[28,35,3],[134,2,1],[19,12,1],[27,13,1],[17,10,1],[16,17,4],[46,2,3],[15,1,2],[35,15,2],[20,6,1],[16,10,3],[33,11,1],[20,8,4],[15,5,1],[33,5,2],[460,6,1],[132,2,1],[73,14,3],[34,5,1],[123,1,2],[15,8,1],[30,1,1],[16,1,1],[73,3,1],[54,4,1],[17,1,9],[17,17,3],[22,1,3],[46,16,8],[18,1,1],[22,3,2],[21,4,1],[40,5,1],[19,2,1],[16,11,1],[19,4,1],[26,4,1],[87,1,3],[75,1,8],[25,1,1],[2230,5,1],[16,1,1],[17,10,3],[15,44,2],[79,3,1],[21,19,1],[292,5,13],[27,4,1],[25,2,1],[23,34,1],[36,2,1],[15,2,7],[18,3,3],[62,1,7],[16,61,5],[15,5,1],[36,5,1],[67,8,3],[18,4,1],[23,2,1],[16,21,3],[32,7,1],[22,6,1],[88,5,1],[19,2,4],[38,2,1],[47,6,28],[18,35,3],[159,15,1],[25,3,5],[295,9,4],[26,2,1],[27,8,3],[86,6,1],[24,25,4],[18,1,2],[16,6,1],[64,16,1],[39,1,2],[30,1,4],[44,1,3],[82,11,4],[28,13,2],[46,19,1],[15,26,1],[30,6,11],[51,3,6],[19,20,1],[940,6,4],[21,6,1],[29,2,1],[20,2,1],[31,2,1],[21,2,3],[25,27,1],[26,2,1],[17,4,1],[64,7,1],[126,7,15],[18,8,1],[20,13,2],[16,7,2],[18,2,1],[19,4,5],[29,1,1],[80,12,2],[42,14,6],[107,2,1],[15,4,1],[48,16,1],[62,3,2],[15,13,1],[29,48,7],[25,4,1],[17,5,20],[19,7,3],[22,10,3],[58,15,3],[17,14,1],[121,2,2],[33,64,11],[16,15,2],[39,6,2],[25,69,7],[69,2,1],[41,6,2],[20,5,1],[42,22,4],[18,17,4],[16,14,3],[27,14,1],[20,1,1],[44,1,101],[33,9,1],[26,2,8],[30,24,3],[27,24,2],[34,7,1],[39,6,3],[20,2,3],[55,5,1],[22,22,2],[17,2,1],[55,3,1],[29,10,5],[60,12,2],[18,13,3],[93,3,2],[15,3,1],[26,5,5],[18,1,1],[17,16,2],[15,13,3],[22,12,1],[256,19,27],[18,7,8],[22,3,1],[35,3,4],[16,2,1],[19,6,2],[24,1,1],[29,3,2],[36,21,8],[24,1,1],[18,6,2],[26,24,11],[19,15,2],[16,1,1],[28,4,1],[60,11,1],[62,4,2],[70,2,1],[75,1,2],[125,3,1],[21,6,1],[165,23,2],[108,1,1],[35,5,1],[251,19,12],[137,4,1],[81,11,4],[104,19,4],[18,18,3],[19,13,1],[18,112,5],[19,6,2],[28,7,2],[23,9,1],[20,15,7],[34,1,1],[24,12,3],[15,5,1],[40,9,4],[24,41,6],[35,1,1],[17,3,1],[17,3,
4],[46,7,2],[21,8,10],[17,7,4],[36,6,1],[32,6,2],[31,1,1],[17,32,5],[26,3,4],[16,4,1],[21,2,1],[19,4,1],[33,4,1],[46,7,1],[28,9,1],[169,9,24],[24,18,2],[103,6,1],[93,1,1],[156,2,1],[58,7,1],[55,30,3],[15,5,1],[20,9,1],[19,20,1],[44,1,3],[16,2,1],[23,4,1],[22,10,1],[16,138,5],[17,2,1],[17,1,2],[70,8,5],[15,3,6],[22,6,1],[20,1,1],[35,2,4],[15,3,1],[26,119,46],[390,18,2],[22,4,1],[175,5,2],[23,4,1],[26,2,21],[17,1,2],[112,4,1],[18,22,5],[22,2,1],[122,13,1],[18,1,1],[27,7,1],[26,18,5],[18,1,3],[28,1,15],[35,11,1],[15,2,1],[55,6,5],[67,3,1],[30,5,7],[31,12,1],[16,9,12],[43,7,1],[23,21,1],[43,2,7],[53,40,1],[58,6,1],[29,27,11],[65,6,2],[27,4,2],[15,7,2],[17,26,13],[48,4,79],[30,2,6],[25,1,1],[20,20,6],[59,2,5],[15,14,4],[18,7,1],[18,2,1],[28,7,1],[35,1,1],[15,12,4],[52,2,2],[16,25,1],[91,1,1],[27,7,3],[62,4,1],[29,11,1],[25,4,3],[15,1,1],[40,6,2],[19,2,2],[24,14,2],[33,5,1],[58,3,3],[23,1,4],[15,2,2],[1263,4,1],[92,5,1],[17,2,1],[16,10,1],[50,8,1],[24,2,1],[73,1,1],[30,33,55],[18,15,1],[15,9,4],[23,1,3],[17,5,1],[43,3,1],[15,9,2],[19,4,2],[20,20,4],[31,1,2],[21,3,1],[79,9,13],[20,3,24],[56,2,1],[26,1,2],[15,3,1],[30,12,1],[64,6,1],[327,8,47],[39,2,1],[22,17,5],[18,6,3],[74,14,2],[17,4,1],[39,1,3],[520,9,3],[65,9,1],[36,1,4],[264,3,3],[16,1,1],[18,5,3],[22,16,3],[21,2,1],[15,3,3],[49,5,1],[37,19,2],[19,13,2],[30,1,1],[44,4,1],[19,9,31],[22,4,2],[21,4,5],[16,4,1],[40,17,1],[15,12,4],[43,4,3],[21,30,1],[60,16,3],[28,2,1],[38,16,2],[19,3,1],[68,18,4],[1,4,3],[1,9,1],[1,2,2],[1,1,4],[1,148,4],[1,6,1],[1,16,1],[1,4,1],[1,19,3],[1,7,3],[1,2,2],[1,4,2],[1,47,5],[1,2,2],[1,1,4],[1,1,2],[1,1,2],[1,1,1],[1,4,2],[1,7,1],[1,4,6],[1,2,1],[1,5,4],[1,9,3],[1,9,2],[1,7,1],[1,4,1],[1,10,2],[1,1,1],[1,5,1],[1,5,1],[1,2,16],[1,2,1],[1,1,1],[1,3,2],[1,8,3],[1,1,18],[1,5,1],[1,14,3],[1,6,6],[1,7,1],[1,1,1],[1,16,1],[1,2,1],[1,2,1],[1,1,2],[1,4,4],[1,4,1],[1,9,1],[1,25,7],[1,1,1],[1,8,2],[1,1,4],[1,77,8],[1,1,3],[1,6,3],[1,4,2],[1,2,2],[1,2,1],[1,40,1],[1,26,3],[1,1,4],[1,1,1],[1,2,2],[1,1,2],[1,15,1],[1,35,86],[1,3,2],[1,4,1],[1,2,1],[1,4,3],[1,30,1],[1,2,1],[1,4,2],[1,2,1],[1,1,1],[1,2,1],[1,3,1],[1,2,3],[1,3,1],[1,14,1],[1,3,2],[1,7,4],[1,6,2],[1,2,1],[1,23,2],[1,4,1],[1,4,3],[1,26,3],[1,47,15],[1,3,5],[1,5,1],[1,3,1],[1,2,1],[1,2,1],[1,3,1],[1,36,1],[1,2,1],[1,1,9],[1,6,1],[1,2,1],[1,8,3],[1,7,1],[1,33,2],[1,14,4],[1,13,3],[1,2,1],[1,5,1],[1,7,2],[1,9,3],[1,6,1],[1,3,1],[1,9,1],[1,2,2],[1,2,1],[1,6,3],[1,4,2],[1,2,1],[1,1,1],[1,13,4],[1,9,2],[1,4,2],[1,7,14],[1,8,1],[1,3,1],[1,25,2],[1,2,1],[1,11,1],[1,2,1],[1,1,1],[1,3,3],[1,3,2],[1,2,1],[1,2,1],[1,2,8],[1,9,1],[1,13,9],[1,3,1],[1,8,1],[1,102,71],[1,22,1],[1,2,3],[1,22,2],[1,1,1],[1,3,1],[1,12,1],[1,3,2],[1,1,1],[1,5,2],[1,30,6],[1,14,1],[1,2,1],[1,1,1],[1,5,1],[1,8,1],[1,4,2],[1,3,1],[1,2,1],[1,1,1],[1,1,1],[1,12,1],[1,14,1],[1,10,2],[1,22,3],[1,15,2],[1,4,2],[1,5,1],[1,10,2],[1,10,26],[1,1,2],[1,1,2],[1,17,1],[1,1,1],[1,7,1],[1,1,1],[1,8,2],[1,5,2],[1,15,1],[1,16,2],[1,7,1],[1,26,1],[1,16,2],[1,13,6],[1,3,3],[1,2,1],[1,2,1],[1,5,3],[1,1,1],[1,4,1],[1,1,1],[1,2,2],[1,13,4],[1,50,2],[1,12,3],[1,2,1],[1,16,5],[1,2,8],[1,3,5],[1,1,1],[1,25,1],[1,5,1],[1,13,2],[1,1,2],[1,8,1],[1,13,1],[1,4,4],[1,2,3],[1,7,2],[1,2,4],[1,2,1],[1,1,2],[1,4,1],[1,3,2],[1,8,4],[1,4,1],[1,2,2],[1,2,1],[1,3,1],[1,7,1],[1,8,5],[1,34,4],[1,2,3],[1,1,1],[1,8,3],[1,3,1],[1,26,2],[1,3,1],[1,1,6],[1,2,4],[1,7,1],[1,9,2],[1,3,93],[1,2,1],[1,3,2],[1,3,3],[1,15,3],[1,12,1],[1,1,1],[1,1,5],[1,4,1],[1,1,4],[1,2,1],[1,6,4],[1,9,1],[1,1,9],[1,11,1],[1,68,2],[1,7,1],[1,11,1],[1,6,1],[1,5,2],[1,2,1],[1,19
,1],[1,3,1],[1,1,2],[1,37,1],[1,19,1],[1,4,5],[1,8,1],[1,1,1],[1,7,1],[1,3,1],[1,4,1],[1,6,7],[1,2,1],[1,14,3],[1,4,1],[1,6,5],[1,1,1],[1,1,1],[1,2,1],[1,1,2],[1,7,2],[1,8,1],[1,17,136],[1,6,1],[1,3,2],[1,9,12],[1,7,2],[1,2,9],[1,1,4],[1,3,1],[1,10,1],[1,6,16],[1,8,1],[1,2,2],[1,2,2],[1,4,3],[1,3,3],[1,24,3],[1,68,28],[1,16,1],[1,9,2],[1,1,2],[1,18,7],[1,3,1],[1,5,2],[1,1,3],[1,3,1],[1,3,8],[1,73,5],[1,6,3],[1,5,1],[1,2,1],[1,15,7],[1,80,2],[1,3,1],[1,12,3],[1,8,1],[1,2,1],[1,9,5],[1,3,2],[1,319,20],[1,2,1],[1,4,6],[1,5,4],[1,25,1],[1,8,1],[1,6,5],[1,18,1],[1,2,2],[1,5,2],[1,10,1],[1,10,1],[1,2,1],[1,6,2],[1,7,2],[1,39,1],[1,7,79],[1,28,4],[1,2,1],[1,4,1],[1,25,5],[1,23,3],[1,10,3],[1,2,1],[1,13,1],[1,2,2],[1,6,1],[1,6,4],[1,12,1],[1,4,1],[1,3,1],[1,10,1],[1,4,2],[1,7,1],[1,11,1],[1,6,1],[1,4,2],[1,3,3],[1,1,1],[1,1,1],[1,3,3],[1,3,2],[1,15,1],[1,1,1],[1,1,4],[1,26,2],[1,1,1],[1,7,1],[1,4,63],[1,1,19],[1,96,7],[1,7,2],[1,6,1],[1,4,1],[1,18,2],[1,1,2],[1,4,1],[1,3,3],[1,18,1],[1,3,1],[1,14,1],[1,6,2],[1,13,1],[1,1,5],[1,13,2],[1,1,1],[1,4,4],[1,10,1],[1,2,1],[1,12,3],[1,7,1],[1,8,1],[1,3,1],[1,2,2],[1,4,5],[1,9,1],[1,2,1],[1,2,1],[1,6,8],[1,32,3],[1,3,2],[1,6,1],[1,5,1],[1,7,1],[1,4,2],[1,2,1],[1,5,4],[1,1,2],[1,9,1],[1,2,1],[1,11,1],[1,5,2],[1,2,1],[1,1,1],[1,3,1],[1,7,13],[1,4,4],[1,1,1],[1,6,1],[1,1,3],[1,6,6],[1,6,1],[1,4,4],[1,10,1],[1,15,1],[1,3,7],[1,6,1],[1,9,1],[1,14,23],[1,14,2],[1,6,3],[1,2,1],[1,9,1],[1,1,3],[1,6,4],[1,15,2],[1,8,1],[1,6,6],[1,16,10],[1,5,4],[1,30,3],[1,7,1],[1,4,1],[1,3,1],[1,6,6],[1,1,2],[1,3,2],[1,1,1],[1,1,1],[1,1,1],[1,2,5],[1,2,1],[1,2,5],[1,24,1],[1,3,1],[1,6,1],[1,2,1],[1,4,1],[1,2,2],[1,4,1],[1,1,1],[1,3,1],[1,8,2],[1,4,2],[1,2,2],[1,2,1],[1,12,6],[1,2,1],[1,32,42],[1,7,1],[1,7,1],[1,12,1],[1,2,1],[1,6,1],[1,42,1],[1,2,1],[1,1,2],[1,2,1],[1,6,1],[1,2,2],[1,8,1],[1,22,4],[1,1,1],[1,11,20],[1,6,2],[1,2,1],[1,4,2],[1,9,1],[1,10,1],[1,16,5],[1,3,2],[1,8,1],[1,6,3],[1,1,2],[1,6,1],[1,2,1],[1,28,1],[1,18,1],[1,17,8],[1,4,1],[1,2,2],[1,13,1],[1,25,3],[1,7,4],[1,3,1],[1,1,1],[1,3,3],[1,4,1],[1,7,5],[1,2,2],[1,5,1],[1,2,2],[1,2,2],[1,14,1],[1,3,3],[1,4,1],[1,1,2],[1,11,1],[1,2,1],[1,6,1],[1,7,6],[1,7,1],[1,2,2],[1,2,1],[1,31,4],[1,4,3],[1,14,6],[1,4,4],[1,1,1],[1,2,1],[1,12,5],[1,4,1],[1,7,1],[1,3,1],[1,4,1],[1,11,1],[1,12,1],[1,3,2],[1,9,1],[1,17,2],[1,9,5],[1,6,1],[1,13,2],[1,5,1],[1,4,3],[1,3,1],[1,1,4],[1,7,1],[1,4,1],[1,3,1],[1,56,3],[1,1,1],[1,9,1],[1,4,1],[1,15,1],[1,2,1],[1,12,1],[1,4,2],[1,1,1],[1,1,1],[1,149,2],[1,56,1],[1,4,5],[1,2,2],[1,11,3],[1,2,3],[1,1,2],[1,2,1],[1,15,4],[1,2,2],[1,4,1],[1,17,2],[1,10,5],[1,14,2],[1,8,2],[1,4,2],[1,4,1],[1,6,1],[1,5,1],[1,7,2],[1,20,5],[1,3,1],[1,4,1],[1,11,1],[1,2,1],[1,1,3],[1,5,2],[1,6,1],[1,4,3],[1,4,3],[1,4,2],[1,7,3],[1,5,1],[1,1,1],[1,2,1],[1,8,1],[1,7,1],[1,2,1],[1,1,1],[1,1,1],[1,4,3],[1,11,1],[1,43,1],[1,7,8],[1,8,1],[1,1,1],[1,8,6],[1,9,3],[1,19,1],[1,2,1],[1,43,3],[1,4,5],[1,2,3],[1,4,1],[1,17,1],[1,9,1],[1,8,72],[1,2,1],[1,4,2],[1,16,1],[1,15,1],[1,8,1],[1,3,1],[1,7,8],[1,4,1],[1,23,2],[1,1,2],[1,1,1],[1,15,7],[1,7,4],[1,3,4],[1,5,1],[1,1,1],[1,6,83],[1,1,1],[1,4,3],[1,2,1],[1,3,2],[1,9,2],[1,5,1],[1,22,1],[1,3,6],[1,6,4],[1,4,1],[1,1,4],[1,1,1],[1,5,3],[1,1,2],[1,15,2],[1,8,1],[1,5,2],[1,1,1],[1,4,10],[1,63,1],[1,2,2],[1,2,1],[1,9,1],[1,4,3],[1,2,1],[1,24,1],[1,2,2],[1,2,2],[1,6,2],[1,13,5],[1,34,5],[1,10,1],[1,3,1],[1,22,9],[1,41,1],[1,1,4],[1,13,2],[1,18,1],[1,4,4],[1,7,1],[1,4,3],[1,14,4],[1,3,2],[1,2,1],[1,7,10],[1,15,3],[1,6,1],[1,1,1],[1,2,5],[1,4,10],[1,5,2],[1,12,6],[1,6,1],[1,19,134],[1,11,1
],[1,233,9],[1,4,2],[1,40,1],[1,2,1],[1,10,1],[1,3,1],[1,3,1],[1,3,1],[1,35,1],[1,2,7],[1,1,3],[1,3,1],[1,14,2],[1,1,1],[1,7,1],[1,6,5],[1,10,1],[1,5,3],[1,8,1],[1,11,1],[1,13,1],[1,8,9],[1,5,1],[1,3,1],[1,11,1],[1,2,1],[1,5,1],[1,7,1],[1,9,3],[1,2,3],[1,2,2],[1,29,2],[1,2,1],[1,4,3],[1,1,2],[1,2,2],[1,3,6],[1,11,1],[1,1,1],[1,11,1],[1,4,1],[1,6,1],[1,3,5],[1,4,1],[1,4,3],[1,34,1],[1,4,2],[1,1,9],[1,18,1],[1,9,3],[1,15,1],[1,4,4],[1,4,2],[1,9,1],[1,4,1],[1,10,1],[1,2,1],[1,2,4],[1,4,1],[1,1,2],[1,3,3],[1,2,1],[1,47,14],[1,3,1],[1,2,1],[1,3,1],[1,1,1],[1,20,1],[1,14,6],[1,2,2],[1,16,2],[1,2,1],[1,1,31],[1,5,9],[1,10,2],[1,10,3],[1,19,1],[1,1,1],[1,13,2],[1,5,1],[1,1,2],[1,1,2],[1,24,1],[1,9,2],[1,4,1],[1,10,3],[1,35,6],[1,1,1],[1,2,1],[1,1,1],[1,3,1],[1,4,5],[1,4,1],[1,1,1],[1,4,1],[1,10,2],[1,55,6],[1,3,22],[1,28,4],[1,6,3],[1,10,1],[1,6,187],[1,3,2],[1,12,5],[1,7,1],[1,4,1],[1,2,2],[1,2,1],[1,31,9],[1,2,8],[1,20,2],[1,36,2],[1,2,2],[1,15,5],[1,5,2],[1,3,2],[1,8,1],[1,1,1],[1,2,1],[1,37,1],[1,17,4],[1,8,1],[1,19,2],[1,7,1],[1,1,1],[1,1,1],[1,2,1],[1,9,1],[1,2,1],[1,2,1],[1,2,1],[1,19,1],[1,33,3],[1,4,1],[1,7,1],[1,3,1],[1,46,4],[1,2,1],[1,3,2],[1,1,2],[1,2,2],[1,14,1],[1,3,1],[1,11,2],[1,2,2],[1,21,2],[1,34,2],[1,4,1],[1,1,1],[1,2,1],[1,22,1],[1,64,9],[1,21,10],[1,3,3],[1,6,1],[1,16,2],[1,3,1],[1,31,4],[1,1,1],[1,1,2],[1,1,1],[1,3,1],[1,5,4],[1,27,1],[1,1,1],[1,2,2],[1,17,10],[1,4,1],[1,25,1],[1,41,1],[1,18,4],[1,17,40],[1,9,1],[1,2,1],[1,7,1],[1,21,2],[1,2,3],[1,3,1],[1,14,1],[1,8,2],[1,2,1],[1,2,2],[1,5,1],[1,1,2],[1,4,1],[1,6,5],[1,9,17],[1,5,1],[1,6,1],[1,4,1],[1,1,1],[1,3,1],[1,61,9],[1,6,1],[1,9,2],[1,2,2],[1,9,1],[1,7,4],[1,12,1],[1,2,2],[1,40,1],[1,17,13],[1,1,7],[1,11,2],[1,20,2],[1,2,1],[1,1,1],[1,12,10],[1,5,3],[1,2,1],[1,1,1],[1,23,2],[1,9,3],[1,4,1],[1,5,2],[1,4,1],[1,19,5],[1,5,1],[1,1,4],[1,5,1],[1,8,1],[1,9,1],[1,5,3],[1,43,3],[1,1,2],[1,3,1],[1,2,2],[1,15,38],[1,3,1],[1,25,1],[1,1,4],[1,5,6],[1,2,1],[1,4,3],[1,4,2],[1,3,1],[1,9,1],[1,4,1],[1,13,2],[1,7,4],[1,2,6],[1,12,1],[1,8,3],[1,1,4],[1,13,1],[1,3,4],[1,3,2],[1,2,2],[1,4,1],[1,6,1],[1,14,3],[1,7,1],[1,8,1],[1,8,1],[1,3,1],[1,32,5],[1,16,2],[1,2,3],[1,38,1],[1,5,4],[1,10,2],[1,2,7],[1,3,1],[1,8,1],[1,3,2],[1,1,3],[1,4,2],[1,71,12],[1,8,4],[1,2,12],[1,3,1],[1,12,2],[1,2,1],[1,5,1],[1,2,28],[1,19,5],[1,10,1],[1,9,2],[1,3,1],[1,7,6],[1,11,1],[1,2,1],[1,27,2],[1,7,4],[1,4,2],[1,12,8],[1,8,96],[1,12,1],[1,2,4],[1,965,1303],[1,7,5],[1,15,3],[1,3,2],[1,18,2],[1,25,3],[1,7,2],[1,18,2],[1,6,1],[1,10,2],[1,4,1],[1,1,3],[1,5,1],[1,19,2],[1,8,1],[1,50,4],[1,8,1],[1,11,1],[1,9,1],[1,2,1],[1,2,5],[1,3,1],[1,6,2],[1,1,1],[1,13,5],[1,19,1],[1,7,2],[1,17,1],[1,6,1],[1,4,1],[1,7,3],[1,13,3],[1,7,4],[1,5,2],[1,4,1],[1,11,16],[1,7,1],[1,1,1],[1,2,1],[1,2,1],[1,14,3],[1,30,1],[1,2,6],[1,6,2],[1,3,1],[1,4,1],[1,9,11],[1,6,1],[1,35,1],[1,2,8],[1,1,2],[1,3,2],[1,1,1],[1,9,1],[1,2,57],[1,2,1],[1,5,1],[1,4,2],[1,15,1],[1,12,3],[1,4,3],[1,17,1],[1,12,2],[1,21,12],[1,2,1],[1,9,1],[1,9,47],[1,49,4],[1,5,1],[1,4,1],[1,24,1],[1,2,2],[1,64,2],[1,48,7],[1,2,2],[1,10,2],[1,3,1],[1,11,1],[1,5,1],[1,1,2],[1,2,4],[1,6,1],[1,19,6],[1,6,2],[1,3,2],[1,1,1],[1,22,2],[1,3,2],[1,5,14],[1,2,1],[1,11,1],[1,4,2],[1,6,1],[1,24,10],[1,7,1],[1,2,74],[1,6,1],[1,28,1],[1,1,1],[1,1,1],[1,10,1],[1,88,4],[1,9,4],[1,26,1],[1,3,1],[1,4,1],[1,4,1],[1,6,1],[1,23,1],[1,2,7],[1,1,3],[1,7,1],[1,1,1],[1,5,2],[1,4,1],[1,2,1],[1,1,1],[1,15,5],[1,22,1],[1,6,3],[1,12,2],[1,48,14],[1,7,1],[1,5,1],[1,10,5],[1,5,1],[1,6,5],[1,2,3],[1,14,3],[1,3,1],[1,8,4],[1,2,5],[1,34,3],[1,2,1],[1,4,1],
[1,6,7],[1,3,1],[1,3,3],[1,32,2],[1,3,1],[1,3,1],[1,2,1],[1,3,1],[1,39,8],[1,1,1],[1,15,8],[1,3,4],[1,2,3],[1,1,3],[1,38,18],[1,6,1],[1,25,4],[1,2,1],[1,8,1],[1,3,1],[1,24,1],[1,5,5],[1,5,4],[1,2,3],[1,2,1],[1,5,4],[1,51,1],[1,23,3],[1,2,1],[1,2,1],[1,1,2],[1,7,2],[1,3,1],[1,1,1],[1,4,1],[1,2,1],[1,7,6],[1,8,1],[1,11,1],[1,2,6],[1,2,1],[1,2,1],[1,1,1],[1,26,1],[1,3,1],[1,2,1],[1,2,1],[1,2,1],[1,12,2],[1,1,3],[1,3,1],[1,2,4],[1,19,3],[1,3,1],[1,3,2],[1,49,3],[1,2,1],[1,21,3],[1,1,1],[1,5,1],[1,4,1],[1,2,2],[1,2,1],[1,1,1],[1,7,4],[1,2,1],[1,2,1],[1,2,1],[1,3,2],[1,26,2],[1,9,1],[1,2,2],[1,12,1],[1,4,32],[1,4,1],[1,17,1],[1,1,2],[1,77,4],[1,2,1],[1,12,1],[1,2,1],[1,2,4],[1,5,2],[1,10,3],[1,4,3],[1,2,1],[1,1,3],[1,16,4],[1,3,1],[1,40,2],[1,13,1],[1,2,1],[1,6,2],[1,12,2],[1,6,11],[1,6,1],[1,1,1],[1,10,6],[1,1,1],[1,6,5],[1,38,4],[1,2,7],[1,9,1],[1,5,2],[1,3,1],[1,2,1],[1,5,2],[1,4,1],[1,1,1],[1,1,1],[1,4,2],[1,4,3],[1,5,2],[1,1,4],[1,11,4],[1,14,4],[1,4,1],[1,17,2],[1,2,2],[1,39,1],[1,9,21],[1,14,2],[1,4,4],[1,4,3],[1,9,2],[1,1,1],[1,3,2],[1,1,1],[1,1,7],[1,16,4],[1,5,1],[1,2,1],[1,2,1],[1,2,1],[1,98,19],[1,4,1],[1,1,1],[1,5,1],[1,7,1],[1,1,3],[1,9,1],[1,4,2],[1,2,1],[1,7,2],[1,2,1],[1,1,2],[1,1,1],[1,5,2],[1,6,1],[1,11,6],[1,5,4],[1,40,5],[1,1,2],[1,9,1],[1,2,1],[1,6,1],[1,5,1],[1,11,2],[1,4,1],[1,3,17],[1,1,1],[1,1,5],[1,9,5],[1,60,1],[1,3,7],[1,3,4],[1,5,1],[1,3,10],[1,5,2],[1,7,1],[1,2,1],[1,14,14],[1,4,3],[1,1,2],[1,2,4],[1,5,1],[1,11,7],[1,3,1],[1,29,3],[1,2,4],[1,8,1],[1,53,1],[1,10,1],[1,7,2],[1,2,13],[1,58,1],[1,5,6],[1,2,1],[1,4,2],[1,4,2],[1,4,2],[1,5,2],[1,2,3],[1,12,2],[1,4,6],[1,34,1],[1,1,1],[1,8,1],[1,4,1],[1,2,1],[1,2,2],[1,16,1],[1,4,2],[1,3,13],[1,2,2],[1,46,2],[1,4,1],[1,6,1],[1,1,2],[1,2,1],[1,3,6],[1,3,1],[1,19,1],[1,2,1],[1,23,1],[1,3,1],[1,1,1],[1,7,2],[1,4,4],[1,18,3],[1,1,1],[1,7,2],[1,2,2],[1,7,1],[1,2,1],[1,2,1],[1,6,1],[1,9,4],[1,3,1],[1,5,1],[1,13,1],[1,2,2],[1,33,1],[1,12,1],[1,9,3],[1,2,1],[1,1,1],[1,18,1],[1,1,3],[1,3,15],[1,2,4],[1,17,1],[1,1,1],[1,1,1],[1,4,8],[1,1,2],[1,31,19],[1,1,5],[1,7,6],[1,12,4],[1,2,4],[1,7,8],[1,4,2],[1,13,2],[1,19,18],[1,42,4],[1,3,1],[1,17,1],[1,3,3],[1,4,2],[1,12,1],[1,1,6],[1,23,2],[1,3,1],[1,20,1],[1,21,4],[1,1,1],[1,3,2],[1,10,1],[1,9,1],[1,8,6],[1,21,3],[1,5,1],[1,7,6],[1,2,1],[1,5,1],[1,1,2],[1,11,1],[1,8,212],[1,9,3],[1,6,1],[1,1,2],[1,25,12],[1,4,1],[1,14,15],[1,4,1],[1,13,1],[1,2,2],[1,3,1],[1,4,1],[1,3,1],[1,1,1],[1,3,1],[1,9,7],[1,1,1],[1,6,1],[1,8,2],[1,8,1],[1,2,3],[1,3,1],[1,2,3],[1,1,2],[1,10,1],[1,6,1],[1,12,3],[1,12,1],[1,1,1],[1,2,1],[1,2,4],[1,4,1],[1,2,1],[1,1,1],[1,4,1],[1,23,2],[1,4,2],[1,20,1],[1,17,4],[1,8,2],[1,4,6],[1,4,1],[1,6,1],[1,10,1],[1,6,2],[1,1,1],[1,3,1],[1,4,1],[1,4,1],[1,16,143],[1,7,1],[1,10,1],[1,7,2],[1,3,3],[1,8,3],[1,2,1],[1,49,1],[1,2,7],[1,14,4],[1,31,3],[1,29,1],[1,31,8],[1,5,2],[1,7,1],[1,1,1],[1,4,5],[1,1,1],[1,7,3],[1,1,2],[1,5,3],[1,3,1],[1,7,4],[1,129,9],[1,13,1],[1,11,4],[1,6,28],[1,6,1],[1,6,1],[1,20,1],[1,2,1],[1,16,3],[1,3,3],[1,5,1],[1,64,1],[1,4,2],[1,7,1],[1,21,3],[1,2,2],[1,9,1],[1,2,1],[1,5,6],[1,6,6],[1,3,1],[1,5,1],[1,3,1],[1,3,1],[1,6,2],[1,2,3],[1,4,1],[1,1,1],[1,12,37],[1,6,1],[1,1,1],[1,4,2],[1,4,8],[1,6,2],[1,2,2],[1,19,1],[1,1,1],[1,1,3],[1,3,1],[1,4,5],[1,15,2],[1,8,3],[1,1,1],[1,2,2],[1,3,1],[1,10,1],[1,4,1],[1,1,2],[1,19,1],[1,5,2],[1,4,4],[1,3,2],[1,3,17],[1,1,1],[1,1,1],[1,2,1],[1,18,3],[1,3,1],[1,16,4],[1,5,1],[1,11,2],[1,19,8],[1,2,1],[1,2,1],[1,1,6],[1,3,1],[1,2,1],[1,1,1],[1,2,1],[1,11,3],[1,17,4],[1,4,1],[1,4,4],[1,5,2],[1,1,1],[1,1,2],[1,10,12],[1,2,2],[1,
8,1],[1,1,2],[1,8,1],[1,17,2],[1,2,1],[1,4,1],[1,6,1],[1,20,21],[1,5,7],[1,3,1],[1,13,2],[1,3,6],[1,8,3],[1,12,1],[1,12,2],[1,3,2],[1,15,2],[1,6,1],[1,9,5],[1,5,3],[1,4,1],[1,7,4],[1,4,4],[1,9,4],[1,11,1],[1,3,1],[1,17,1],[1,71,5],[1,7,1],[1,3,1],[1,5,1],[1,1,1],[1,1,2],[1,2,1],[1,1,2],[1,10,2],[1,3,1],[1,2,2],[1,5,1],[1,28,4],[1,2,1],[1,1,1],[1,9,1],[1,3,2],[1,8,2],[1,13,1],[1,2,1],[1,6,1],[1,25,79],[1,30,24],[1,10,31],[1,5,1],[1,9,1],[1,1,1],[1,4,1],[1,118,14],[1,18,3],[1,30,1],[1,10,3],[1,5,1],[1,5,1],[1,1,1],[1,6,1],[1,9,3],[1,6,2],[1,5,1],[1,2,2],[1,3,1],[1,7,4],[1,8,2],[1,10,2],[1,1,8],[1,41,1],[1,21,4],[1,6,1],[1,13,3],[1,5,1],[1,34,7],[1,22,1],[1,9,8],[1,5,3],[1,11,1],[1,2,1],[1,6,1],[1,4,1],[1,72,1],[1,44,3],[1,2,1],[1,1,1],[1,3,1],[1,8,2],[1,1,3],[1,14,1],[1,3,2],[1,1,1],[1,9,2],[1,17,1],[1,9,35],[1,3,1],[1,6,1],[1,2,11],[1,5,3],[1,1257,55],[1,1,1],[1,2,1],[1,14,7],[1,51,44],[1,3,6],[1,1,1],[1,6,2],[1,2,1],[1,11,2],[1,8,3],[1,3,2],[1,3,3],[1,4,1],[1,2,1],[1,5,1],[1,8,5],[1,60,1],[1,6,3],[1,36,2],[1,1,1],[1,2,1],[1,10,2],[1,26,2],[1,7,3],[1,6,1],[1,6,2],[1,3,3],[1,2,3],[1,6,2],[1,2,2],[1,2,2],[1,5,2],[1,2,1],[1,15,5],[1,1,2],[1,1,3],[1,37,24],[1,8,2],[1,17,2],[1,31,1],[1,14,2],[1,2,1],[1,16,2],[1,3,1],[1,2,2],[1,1,2],[1,2,3],[1,4,2],[1,1,1],[1,9,5],[1,1,2],[1,1,4],[1,4,18],[1,6,1],[1,12,1],[1,3,85],[1,17,2],[1,4,1],[1,7,1],[1,4,1],[1,3,1],[1,22,2],[1,1,1],[1,15,27],[1,4,1],[1,1,1],[1,1,3],[1,3,1],[1,35,2],[1,1,1],[1,33,4],[1,2,1],[1,3,3],[1,6,1],[1,9,1],[1,8,1],[1,6,1],[1,16,2],[1,20,2],[1,5,1],[1,1,5],[1,2,2],[1,12,25],[1,6,1],[1,13,1],[1,2,1],[1,2,1],[1,10,1],[1,2,1],[1,37,3],[1,2,1],[1,58,11],[1,14,3],[1,6,1],[1,6,1],[1,1,3],[1,1,1],[1,9,2],[1,1,502],[1,45,5],[1,5,1],[1,4,1],[1,2,8],[1,5,1],[1,1,1],[1,7,1],[1,4,1],[1,3,4],[1,1,1],[1,10,1],[1,9,1],[1,13,1],[1,10,8],[1,4,4],[1,7,1],[1,1,2],[1,2,2],[1,9,2],[1,13,2],[1,8,1],[1,1,1],[1,2,4],[1,29,1],[1,8,2],[1,7,3],[1,30,7],[1,1,1],[1,10,10],[1,3,1],[1,1,1],[1,5,1],[1,4,3],[1,7,1],[1,43,8],[1,1,2],[1,9,1],[1,1,1],[1,3,6],[1,9,1],[1,1,1],[1,7,1],[1,6,1],[1,2,2],[1,13,4],[1,13,3],[1,2,3],[1,8,1],[1,11,2],[1,9,53],[1,2,1],[1,16,1],[1,6,3],[1,48,3],[1,4,1],[1,7,3],[1,2,2],[1,8,1],[1,8,1],[1,26,2],[1,3,1],[1,8,2],[1,121,2],[1,2,2],[1,8,1],[1,2,2],[1,4,2],[1,8,1],[1,1,1],[1,4,1],[1,3,3],[1,7,1],[1,7,2],[1,2,1],[1,8,2],[1,34,28],[1,3,2],[1,3,1],[1,5,1],[1,9,1],[1,7,1],[1,14,4],[1,1,1],[1,34,4],[1,1,1],[1,6,1],[1,3,1],[1,2,1],[1,4,1],[1,5,2],[1,10,1],[1,41,5],[1,7,2],[1,19,4],[1,3,3],[1,12,3],[1,7,1],[1,4,2],[1,16,1],[1,3,1],[1,8,4],[1,9,2],[1,8,2],[1,2,1],[1,10,2],[1,8,1],[1,16,2],[1,7,2],[1,5,1],[1,2,3],[1,15,4],[1,3,5],[1,4,4],[1,1,1],[1,3,2],[1,5,1],[1,8,4],[1,4,1],[1,41,7],[1,2,1],[1,1,3],[1,1,6],[1,2,1],[1,10,2],[1,10,2],[1,3,3],[1,39,4],[1,1,2],[1,5,7],[1,12,2],[1,15,5],[1,4,1],[1,13,1],[1,3,1],[1,44,3],[1,1,2],[1,1,1],[1,6,1],[1,3,1],[1,3,2],[1,7,15],[1,1,1],[1,11,4],[1,3,1],[1,1,3],[1,1,1],[1,2,1],[1,9,4],[1,22,1],[1,46,2],[1,3,18],[1,22,8],[1,3,1],[1,4,10],[1,12,16],[1,2,1],[1,8,3],[1,1,1],[1,2,4],[1,1,1],[1,6,4],[1,7,1],[1,7,4],[1,14,4],[1,1,1],[1,13,2],[1,61,1],[1,6,2],[1,16,1],[1,14,7],[1,9,2],[1,18,2],[1,9,3],[1,1,2],[1,4,1],[1,6,1],[1,6,4],[1,10,1],[1,5,2],[1,7,1],[1,3,1],[1,11,2],[1,53,1],[1,10,2],[1,17,1],[1,2,2],[1,5,14],[1,17,1],[1,2,1],[1,5,1],[1,28,2],[1,8,2],[1,4,1],[1,4,2],[1,21,1],[1,3,1],[1,3,2],[1,5,2],[1,5,1],[1,3,13],[1,13,2],[1,124,753],[1,2,2],[1,43,1],[1,6,1],[1,2,2],[1,11,1],[1,22,1],[1,5,2],[1,5,1],[1,8,1],[1,2,4],[1,2,2],[1,9,1],[1,6,1],[1,2,1],[1,6,1],[1,14,3],[1,21,1],[1,3,4],[1,3,3],[1,3,1],[1,2,2],[1,2,2
],[1,5,2],[1,11,1],[1,6,1],[1,3,1],[1,64,1],[1,6,1],[1,2,12],[1,5,1],[1,6,4],[1,10,1],[1,14,1],[1,14,1],[1,2,1],[1,2,1],[1,8,4],[1,17,2],[1,5,3],[1,64,1],[1,33,3],[1,18,2],[1,1,1],[1,42,9],[1,20,2],[1,10,2],[1,2,2],[1,3,1],[1,13,1],[1,5,1],[1,39,5],[1,8,2],[1,6,1],[1,3,2],[1,12,1],[1,2,4],[1,8,1],[1,2,1],[1,4,5],[1,7,1],[1,2,1],[1,2,1],[1,5,2],[1,15,3],[1,6,1],[1,1,1],[1,11,2],[1,4,2],[1,1,1],[1,7,3],[1,7,2],[1,3,1],[1,3,1],[1,2,1],[1,8,3],[1,3,1],[1,7,12],[1,8,1],[1,4,2],[1,6,2],[1,9,1],[1,3,30],[1,8,3],[1,8,2],[1,8,1],[1,11,1],[1,13,1],[1,2,1],[1,16,1],[1,10,1],[1,3,1],[1,6,4],[1,29,2],[1,4,2],[1,4,1],[1,1,1],[1,7,1],[1,1,1],[1,4,11],[1,1,1],[1,6,1],[1,26,1],[1,3,1],[1,2,1],[1,10,1],[1,4,1],[1,14,2],[1,10,1],[1,5,2],[1,5,1],[1,2,1],[1,26,33],[1,1,1],[1,11,2],[1,8,5],[1,18,1],[1,2,1],[1,5,1],[1,4,2],[1,5,1],[1,11,2],[1,1,2],[1,2,2],[1,6,6],[1,10,1],[1,14,1],[1,2,1],[1,13,1],[1,14,1],[1,8,2],[1,21,2],[1,1,2],[1,1,1],[1,14,1],[1,2,1],[1,15,2],[1,4,1],[1,3,1],[1,10,2],[1,4,2],[1,5,1],[1,11,22],[1,8,3],[1,4,1],[1,3,2],[1,1,2],[1,25,3],[1,2,1],[1,11,2],[1,5,2],[1,39,1],[1,1,1],[1,415,128],[1,6,1],[1,5,1],[1,8,5],[1,2,3],[1,1,1],[1,1,1],[1,4,1],[1,2,4],[1,4,1],[1,2,9],[1,4,2],[1,23,3],[1,6,9],[1,5,4],[1,2,5],[1,1,1],[1,7,1],[1,3,7],[1,1,2],[1,2,16],[1,5,2],[1,1,3],[1,4,1],[1,11,1],[1,2,2],[1,2,1],[1,10,1],[1,6,2],[1,11,1],[1,28,1],[1,21,3],[1,3,2],[1,3,1],[1,4,1],[1,1,2],[1,7,1],[1,11,4],[1,4,2],[1,22,4],[1,1,1],[1,1,1],[1,12,7],[1,1,1],[1,4,2],[1,2,1],[1,6,4],[1,14,3],[1,8,2],[1,1,11],[1,13,2],[1,4,1],[1,3,2],[1,95,10],[1,1,2],[1,4,2],[1,27,2],[1,2,1],[1,19,1],[1,13,4],[1,1,1],[1,37,1],[1,4,1],[1,5,1],[1,7,5],[1,1,1],[1,4,5],[1,5,1],[1,1,1],[1,16,2],[1,22,1],[1,4,2],[1,24,4],[1,10,1],[1,77,6],[1,21,1],[1,11,1],[1,2,1],[1,1,1],[1,4,5],[1,2,4],[1,55,4],[1,17,1],[1,1,3],[1,2,2],[1,7,1],[1,17,1],[1,34,2],[1,4,1],[1,2,2],[1,1,2],[1,100,1],[1,17,2],[1,8,6],[1,11,2],[1,11,2],[1,3,1],[1,5,2],[1,1,1],[1,6,7],[1,15,5],[1,7,1],[1,4,1],[1,5,1],[1,6,2],[1,7,1],[1,2,2],[1,10,2],[1,17,1],[1,10,2],[1,6,3],[1,21,1],[1,2,1],[1,78,4],[1,6,1],[1,1,2],[1,5,1],[1,186,9],[1,16,3],[1,15,13],[1,30,4],[1,2,1],[1,15,3],[1,13,1],[1,3,1],[1,1,1],[1,2,2],[1,5,5],[1,7,1],[1,16,1],[1,2,1],[1,14,2],[1,11,5],[1,9,1],[1,13,2],[1,2,1],[1,4,64],[1,4,1],[1,18,4],[1,3,1],[1,1,1],[1,16,2],[1,4,1],[1,11,4],[1,9,3],[1,3,1],[1,4,1],[1,1,1],[1,10,3],[1,7,1],[1,13,1],[1,16,4],[1,1,16],[1,2,2],[1,18,6],[1,42,2],[1,1,3],[1,15,1],[1,3,1],[1,43,1],[1,1,1],[1,27,2],[1,1,3],[1,1,5],[1,13,1],[1,1,1],[1,10,11],[1,8,1],[1,9,1],[1,13,1],[1,1,2],[1,13,3],[1,1,1],[1,5,1],[1,14,2],[1,14,1],[1,13,1],[1,4,3],[1,25,1],[1,1,3],[1,3,3],[1,4,1],[1,1,1],[1,4,4],[1,15,1],[1,2,1],[1,1,1],[1,7,12],[1,68,2],[1,13,2],[1,2,1],[1,6,4],[1,46,6],[1,1,1],[1,2,2],[1,4,1],[1,2,1],[1,11,5],[1,1,1],[1,9,1],[1,9,1],[1,13,1],[1,4,1],[1,14,1],[1,42,9],[1,5,1],[1,4,1],[1,24,7],[1,7,1],[1,17,1],[1,2,1],[1,2,5],[1,3,6],[1,2,1],[1,15,4],[1,3,2],[1,33,2],[1,30,4],[1,27,4],[1,1,1],[1,14,4],[1,2,3],[1,26,7],[1,22,1],[1,2,2],[1,2,2],[1,166,3],[1,4,4],[1,9,1],[1,12,15],[1,2,6],[1,13,2],[1,4,3],[1,9,2],[1,2,3],[1,3,3],[1,9,2],[1,22,1],[1,5,3],[1,3,4],[1,2,3],[1,3,1],[1,23,1],[1,18,1],[1,6,1],[1,4,1],[1,9,3],[1,35,1],[1,73,2],[1,1,3],[1,31,5],[1,25,1],[1,3,4],[1,11,1],[1,9,4],[1,2,1],[1,27,36],[1,23,5],[1,4,2],[1,1,2],[1,29,2],[1,3,2],[1,1,1],[1,4,1],[1,12,1],[1,36,16],[1,5,14],[1,19,1],[1,6,1],[1,6,1],[1,4,1],[1,6,1],[1,4,2],[1,9,7],[1,7,1],[1,30,4],[1,4,1],[1,18,3],[1,2,2],[1,3,1],[1,9,2],[1,2,2],[1,1,2],[1,1,2],[1,14,1],[1,3,1],[1,5,2],[1,10,1],[1,9,1],[1,10,3],[1,4,1],[1,2,1],[1,4,
4],[1,2,1],[1,3,3],[1,39,2],[1,3,1],[1,1,3],[1,14,1],[1,2,4],[1,13,1],[1,4,6],[1,3,5],[1,5,4],[1,8,1],[1,131,1],[1,28,1],[1,5,1],[1,965,1303],[1,8,5],[1,2,9],[1,4,2],[1,5,1],[1,46,3],[1,7,3],[1,1,1],[1,7,3],[1,2,1],[1,4,1],[1,2,1],[1,2,1],[1,2,1],[1,4,6],[1,5,1],[1,9,3],[1,2,2],[1,9,1],[1,42,3],[1,11,3],[1,5,1],[1,1,2],[1,6,1],[1,37,51],[1,2,1],[1,4,3],[1,23,2],[1,1,15],[1,5,4],[1,1,4],[1,18,3],[1,12,3],[1,4,2],[1,4,1],[1,2,7],[1,2,6],[1,3,6],[1,6,1],[1,10,3],[1,4,2],[1,1,2],[1,4,1],[1,4,3],[1,1,3],[1,3,1],[1,6,2],[1,10,2],[1,6,4],[1,4,3],[1,7,2],[1,2,2],[1,4,1],[1,1,1],[1,4,5],[1,14,1],[1,20,4],[1,7,15],[1,18,2],[1,6,1],[1,1,1],[1,7,1],[1,5,2],[1,6,2],[1,4,1],[1,6,3],[1,2,1],[1,6,1],[1,4,1],[1,7,1],[1,7,4],[1,7,1],[1,1,1],[1,24,4],[1,2,2],[1,3,5],[1,8,1],[1,15,2],[1,5,1],[1,2,3],[1,2,2],[1,4,1],[1,6,1],[1,2,3],[1,11,1],[1,23,5],[1,2,2],[1,1,1],[1,8,1],[1,17,6],[1,1,1],[1,9,2],[1,1,1],[1,10,1],[1,5,1],[1,6,1],[1,6,1],[1,5,1],[1,2,6],[1,2,1],[1,9,1],[1,14,1],[1,18,8],[1,39,2],[1,13,1],[1,6,1],[1,6,2],[1,9,1],[1,14,1],[1,5,4],[1,26,2],[1,4,1],[1,7,2],[1,5,5],[1,2,1],[1,20,2],[1,14,1],[1,10,1],[1,4,1],[1,3,1],[1,10,2],[1,9,12],[1,4,4],[1,2,1],[1,4,1],[1,4,1],[1,2,1],[1,8,1],[1,2,4],[1,1,1],[1,33,2],[1,4,1],[1,5,1],[1,205,1],[1,2,1],[1,15,3],[1,5,1],[1,1,1],[1,1,1],[1,1,1],[1,13,1],[1,14,5],[1,6,4],[1,3,1],[1,7,5],[1,42,2],[1,11,1],[1,24,2],[1,11,2],[1,11,2],[1,12,1],[1,7,1],[1,1,1],[1,3,2],[1,21,1],[1,13,1],[1,2,1],[1,37,6],[1,8,4],[1,2,2],[1,2,2],[1,36,1],[1,8,1],[1,19,11],[1,19,7],[1,8,1],[1,18,2],[1,7,2],[1,8,1],[1,1,1],[1,4,1],[1,3,3],[1,10,1],[1,6,1],[1,4,1],[1,10,1],[1,25,1],[1,14,1],[1,14,3],[1,4,1],[1,2,1],[1,2,2],[1,4,2],[1,3,4],[1,62,11],[1,4,1],[1,39,3],[1,65,2],[1,3,1],[1,11,2],[1,4,1],[1,2,2],[1,1,1],[1,2,3],[1,2,1],[1,17,7],[1,7,4],[1,1,4],[1,62,3],[1,17,3],[1,26,3],[1,15,1],[1,2,1],[1,4,6],[1,1,2],[1,8,2],[1,16,2],[1,1,1],[1,7,2],[1,4,1],[1,1,1],[1,7,2],[1,8,2],[1,12,1],[1,1,2],[1,2,1],[1,2,1],[1,26,7],[1,2,1],[1,5,1],[1,5,1],[1,5,1],[1,1,1],[1,6,27],[1,5,4],[1,6,1],[1,8,1],[1,38,2],[1,26,2],[1,13,1],[1,20,2],[1,6,6],[1,2,2],[1,2,1],[1,16,2],[1,88,1],[1,4,1],[1,5,3],[1,1,4],[1,1,4],[1,12,2],[1,3,1],[1,3,1],[1,3,1],[1,2,3],[1,6,1],[1,2,4],[1,28,2],[1,17,3],[1,10,1],[1,51,3],[1,1,1],[1,15,4],[1,10,14],[1,1,3],[1,3,3],[1,1,1],[1,5,1],[1,3,1],[1,23,3],[1,10,1],[1,1,1],[1,21,6],[1,11,1],[1,8,1],[1,1,1],[1,2,1],[1,1,3],[1,26,1],[1,1,2],[1,4,1],[1,4,1],[1,6,1],[1,6,1],[1,2,2],[1,11,5],[1,15,2],[1,13,1],[1,2,2],[1,4,1],[1,4,1],[1,2,6],[1,13,3],[1,23,2],[1,18,2],[1,8,2],[1,1,1],[1,4,1],[1,7,1],[1,2,1],[1,8,6],[1,12,1],[1,23,4],[1,9,4],[1,2,2],[1,8,1],[1,7,2],[1,2,2],[1,2,4],[1,8,16],[1,22,3],[1,2,1],[1,2,4],[1,2,1],[1,9,2],[1,3,3],[1,4,1],[1,3,9],[1,3,1],[1,2,2],[1,2,3],[1,11,1],[1,5,1],[1,5,1],[1,2,2],[1,10,20],[1,2,2],[1,2,1],[1,3,3],[1,10,1],[1,2,3],[1,2,1],[1,5,1],[1,4,2],[1,8,1],[1,2,2],[1,6,1],[1,5,1],[1,9,1],[1,3,2],[1,1,1],[1,2,6],[1,1,1],[1,5,1],[1,2,1],[1,16,1],[1,6,1],[1,2,1],[1,2,1],[1,5,1],[1,9,1],[1,10,16],[1,4,1],[1,4,2],[1,5,2],[1,8,1],[1,16,2],[1,2,1],[1,5,1],[1,1,2],[1,55,2],[1,20,1],[1,11,1],[1,5,2],[1,13,1],[1,1,1],[1,10,6],[1,5,2],[1,21,1],[1,7,3],[1,5,1],[1,7,1],[1,3,1],[1,6,1],[1,46,3],[1,8,5],[1,5,1],[1,2,1],[1,2,6],[1,22,1],[1,42,1],[1,1,1],[1,4,2],[1,13,1],[1,3,3],[1,2,2],[1,4,2],[1,1,3],[1,88,1],[1,24,4],[1,4,1],[1,3,1],[1,5,1],[1,17,6],[1,6,2],[1,20,3],[1,47,2],[1,2,7],[1,13,1],[1,1,3],[1,1,2],[1,2,2],[1,2,2],[1,4,3],[1,7,1],[1,3,1],[1,10,1],[1,2,1],[1,2,5],[1,1,2],[1,17,2],[1,12,4],[1,24,1],[1,3,1],[1,1,3],[1,6,1],[1,2,5],[1,3,1],[1,1,1],[1,13,2],[1,6,1],[1,2
,1],[1,10,2],[1,4,1],[1,1,1],[1,18,7],[1,7,2],[1,8,1],[1,5,1],[1,2,1],[1,4,1],[1,2,2],[1,14,1],[1,13,1],[1,10,4],[1,4,4],[1,6,4],[1,4,1],[1,16,2],[1,8,2],[1,3,3],[1,3,1],[1,21,2],[1,7,1],[1,2,1],[1,2,1],[1,2,3],[1,4,1],[1,6,1],[1,28,1],[1,2,7],[1,3,1],[1,23,4],[1,2,1],[1,6,1],[1,2,1],[1,4,1],[1,3,2],[1,1,1],[1,9,2],[1,9,2],[1,2,1],[1,4,2],[1,10,1],[1,12,1],[1,4,2],[1,7,1],[1,2,2],[1,9,1],[1,16,5],[1,31,2],[1,16,2],[1,22,3],[1,2,1],[1,6,1],[1,1,1],[1,6,3],[1,14,2],[1,5,3],[1,81,3],[1,8,2],[1,1,1],[1,61,9],[1,1,4],[1,2,1],[1,11,3],[1,3,5],[1,3,6],[1,4,7],[1,1,2],[1,5,2],[1,2,1],[1,3,2],[1,9,5],[1,9,1],[1,1,3],[1,3,2],[1,13,3],[1,14,1],[1,15,6],[1,6,1],[1,2,1],[1,7,1],[1,2,1],[1,10,2],[1,2,2],[1,14,1],[1,2,2],[1,3,3],[1,3,1],[1,4,1],[1,59,2],[1,5,2],[1,4,2],[1,1,1],[1,2,1],[1,4,1],[1,2,2],[1,5,4],[1,4,1],[1,4,1],[1,10,3],[1,2,2],[1,2,3],[1,8,1],[1,2,1],[1,1,1],[1,18,1],[1,6,1],[1,12,3],[1,5,3],[1,3,1],[1,7,3],[1,10,2],[1,2,23],[1,1,12],[1,1,1],[1,32,3],[1,2,1],[1,4,1],[1,12,2],[1,4,1],[1,3,1],[1,5,1],[1,4,2],[1,4,1],[1,16,2],[1,1,1],[1,4,1],[1,7,1],[1,2,4],[1,8,1],[1,4,4],[1,1,1],[1,1,2],[1,6,3],[1,8,2],[1,23,15],[1,2,2],[1,2,1],[1,2,1],[1,11,1],[1,3,2],[1,9,2],[1,4,2],[1,2,3],[1,34,1],[1,7,1],[1,2,4],[1,65,2],[1,41,3],[1,1,2],[1,1,1],[1,6,1],[1,6,1],[1,7,1],[1,3,1],[1,14,9],[1,6,1],[1,6,5],[1,2,13],[1,5,2],[1,2,1],[1,4,1],[1,17,1],[1,5,1],[1,1,1],[1,3,2],[1,9,1],[1,1,4],[1,48,2],[1,7,1],[1,4,1],[1,3,1],[1,4,2],[1,118,3],[1,2,1],[1,2,4],[1,2,1],[1,12,13],[1,2,1],[1,4,2],[1,4,1],[1,6,1],[1,1,1],[1,7,2],[1,10,1],[1,21,5],[1,5,2],[1,9,1],[1,2,2],[1,1,1],[1,1,1],[1,1,1],[1,3,1],[1,1,1],[1,7,1],[1,83,9],[1,6,2],[1,7,2],[1,13,1],[1,4,2],[1,3,1],[1,8,2],[1,2,1],[1,10,3],[1,2,1],[1,2,1],[1,9,11],[1,2,1],[1,3,1],[1,17,1],[1,7,2],[1,8,2],[1,20,1],[1,2,1],[1,1,2],[1,8,1],[1,2,1],[1,6,1],[1,21,3],[1,1,2],[1,5,5],[1,2,1],[1,2,3],[1,2,1],[1,2,2],[1,16,1],[1,2,1],[1,2,1],[1,3,1],[1,17,1],[1,6,1],[1,4,15],[1,1,1],[1,11,1],[1,84,15],[1,31,3],[1,2,2],[1,8,1],[1,9,1],[1,2,3],[1,15,2],[1,4,1],[1,18,1],[1,3,1],[1,1,1],[1,2,4],[1,2,2],[1,2,1],[1,2,1],[1,25,1],[1,3,1],[1,141,13],[1,4,2],[1,2,2],[1,14,2],[1,7,1],[1,30,9],[1,17,1],[1,1,2],[1,6,1],[1,2,1],[1,2,1],[1,8,1],[1,2,1],[1,10,1],[1,6,3],[1,12,1],[1,68,1],[1,2,1],[1,10,2],[1,14,2],[1,26,9],[1,7,3],[1,3,3],[1,6,6],[1,3,1],[1,18,4],[1,3,1],[1,4,4],[1,2,1],[1,1,1],[1,37,8],[1,8,6],[1,2,1],[1,9,6],[1,5,2],[1,3,1],[1,3,2],[1,2,1],[1,3,1],[1,13,7],[1,9,1],[1,122,2],[1,2,1],[1,22,6],[1,11,2],[1,16,2],[1,28,46],[1,2,4],[1,7,1],[1,2,3],[1,2,6],[1,2,2],[1,1,2],[1,1,1],[1,5,1],[1,1,2],[1,3,2],[1,7,6],[1,11,1],[1,21,1],[1,40,6],[1,14,2],[1,21,1],[1,1,1],[1,14,2],[1,21,1],[1,2,1],[1,1,1],[1,1,2],[1,40,2],[1,4,2],[1,1,3],[1,1,1],[1,107,2],[1,4,6],[1,136,6],[1,5,1],[1,9,1],[1,24,3],[1,7,1],[1,10,5],[1,29,3],[1,12,2],[1,10,3],[1,5,3],[1,2,1],[1,59,1],[1,5,2],[1,13,2],[1,1,2],[1,50,2],[1,1,3],[1,2,3],[1,6,1],[1,4,2],[1,5,4],[1,3,2],[1,8,1],[1,4,2],[1,1,1],[1,17,1],[1,13,3],[1,2,1],[1,7,1],[1,3,1],[1,8,1],[1,1,1],[1,20,1],[1,4,4],[1,1,2],[1,2,1],[1,2,1],[1,2,2],[1,1,2],[1,13,2],[1,4,1],[1,4,1],[1,3,1],[1,2,1],[1,4,4],[1,13,5],[1,9,1],[1,8,1],[1,12,1],[1,15,3],[1,2,1],[1,2,2],[1,4,1],[1,2,2],[1,1,1],[1,3,1],[1,13,1],[1,4,1],[1,9,4],[1,3,2],[1,2,1],[1,4,4],[1,1,3],[1,15,1],[1,4,1],[1,2,1],[1,3,1],[1,2,1],[1,3,6],[1,5,1],[1,7,10],[1,1,2],[1,6,2],[1,7,2],[1,3,1],[1,3,3],[1,6,1],[1,13,1],[1,22,3],[1,6,5],[1,6,1],[1,3,1],[1,3,1],[1,21,5],[1,11,2],[1,6,3],[1,38,4],[1,6,4],[1,4,1],[1,2,1],[1,5,5],[1,5,3],[1,40,1],[1,4,3],[1,8,1],[1,13,2],[1,4,2],[1,1,1],[1,9,9],[1,1,1],[1,12,2],[1,36,1]
,[1,2,1],[1,18,3],[1,28,1],[1,5,1],[1,20,4],[1,40,3],[1,3,1],[1,5,3],[1,2,1],[1,31,3],[1,6,1],[1,3,1],[1,1,5],[1,3,3],[1,36,1],[1,1,1],[1,22,2],[1,9,2],[1,2,4],[1,2,2],[1,4,4],[1,2,1],[1,6,1],[1,3,3],[1,5,1],[1,13,2],[1,4,1],[1,1,3],[1,1,1],[1,11,5],[1,4,1],[1,2,3],[1,26,1],[1,9,1],[1,6,1],[1,15,1],[1,23,5],[1,3,5],[1,4,3],[1,8,1],[1,9,4],[1,2,1],[1,7,1],[1,1,6],[1,4,1],[1,43,1],[1,2,3],[1,1,1],[1,15,4],[1,3,1],[1,1,1],[1,10,1],[1,79,1],[1,1,14],[1,2,1],[1,6,1],[1,1,1],[1,24,1],[1,2,3],[1,9,2],[1,2,3],[1,8,1],[1,115,15],[1,1,1],[1,1,2],[1,3,1],[1,9,24],[1,6,1],[1,3,6],[1,10,3],[1,3,1],[1,1,1],[1,3,2],[1,2,1],[1,11,1],[1,5,1],[1,1,1],[1,2,1],[1,3,1],[1,5,1],[1,11,1],[1,2,1],[1,7,7],[1,15,1],[1,6,2],[1,51,7],[1,2,1],[1,54,1],[1,5,1],[1,1,1],[1,7,5],[1,1,1],[1,4,1],[1,3,1],[1,22,4],[1,5,3],[1,5,1],[1,64,9],[1,6,1],[1,28,6],[1,5,1],[1,11,1],[1,2,2],[1,4,2],[1,1,4],[1,8,1],[1,1,5],[1,7,1],[1,2,1],[1,2,2],[1,8,1],[1,11,3],[1,8,3],[1,7,1],[1,10,5],[1,5,1],[1,98,5],[1,18,1],[1,1,1],[1,5,1],[1,2,2],[1,14,2],[1,3,1],[1,1,1],[1,11,3],[1,7,9],[1,5,3],[1,3,1],[1,3,3],[1,125,34],[1,1,1],[1,2,1],[1,6,2],[1,2,2],[1,11,7],[1,5,2],[1,5,5],[1,6,1],[1,10,2],[1,14,2],[1,4,3],[1,8,7],[1,2,3],[1,2,2],[1,13,1],[1,6,1],[1,10,5],[1,11,1],[1,4,2],[1,14,1],[1,1,6],[1,15,1],[1,1,3],[1,5,3],[1,7,1],[1,2,1],[1,1,3],[1,2,4],[1,3,1],[1,8,3],[1,2,3],[1,2,1],[1,2,2],[1,2,1],[1,4,1],[1,16,2],[1,1,2],[1,1,5],[1,7,1],[1,3,1],[1,2,1],[1,16,3],[1,4,1],[1,8,2],[1,16,6],[1,12,2],[1,84,26],[1,10,2],[1,2,2],[1,5,1],[1,1,1],[1,8,1],[1,4,1],[1,4,1],[1,4,2],[1,4,1],[1,4,10],[1,14,2],[1,4,2],[1,5,2],[1,19,1],[1,4,3],[1,8,2],[1,6,1],[1,2,5],[1,2,1],[1,16,4],[1,4,1],[1,2,2],[1,7,1],[1,4,2],[1,4,1],[1,8,1],[1,10,2],[1,3,2],[1,3,1],[1,10,2],[1,1,1],[1,12,3],[1,37,1],[1,10,1],[1,16,4],[1,1,1],[1,11,1],[1,4,1],[1,8,6],[1,3,2],[1,66,2],[1,14,1],[1,2,4],[1,2,2],[1,7,2],[1,24,2],[1,5,1],[1,1,1],[1,1,1],[1,3,1],[1,31,2],[1,24,1],[1,8,5],[1,8,2],[1,3,4],[1,64,1],[1,1,4],[1,4,47],[1,8,4],[1,25,1],[1,19,2],[1,4,1],[1,33,4],[1,16,2],[1,4,1],[1,1,1],[1,2,3],[1,27,1],[1,20,1],[1,10,3],[1,2,1],[1,2,1],[1,76,1],[1,2,1],[1,5,1],[1,2,2],[1,15,3],[1,40,2],[1,4,22],[1,2,2],[1,2,2],[1,10,1],[1,3,1],[1,55,4],[1,2,7],[1,7,1],[1,4,6],[1,2,1],[1,2,1],[1,28,1],[1,2,2],[1,6,2],[1,6,2],[1,4,15],[1,3,2],[1,1,1],[1,29,1],[1,13,1],[1,16,1],[1,4,1],[1,7,7],[1,3,3],[1,16,4],[1,12,11],[1,1,1],[1,2,4],[1,54,2],[1,1,2],[1,6,2],[1,1,3],[1,2,2],[1,1,1],[1,2,1],[1,11,4],[1,9,1],[1,20,1],[1,1,1],[1,17,3],[1,1,1],[1,9,2],[1,2,2],[1,3,1],[1,29,19],[1,28,1],[1,8,3],[1,21,8],[1,7,3],[1,6,2],[1,5,2],[1,11,1],[1,1,2],[1,7,1],[1,22,1],[1,9,1],[1,3,3],[1,8,2],[1,5,1],[1,23,2],[1,11,5],[1,17,2],[1,5,5],[1,4,3],[1,33,1],[1,2,3],[1,6,1],[1,32,1],[1,6,2],[1,64,2],[1,3,1],[1,7,1],[1,3,6],[1,12,1],[1,1,1],[1,9,1],[1,38,3],[1,1,1],[1,3,1],[1,3,5],[1,78,16],[1,3,1],[1,7,1],[1,26,1],[1,9,2],[1,113,2],[1,9,1],[1,5,9],[1,3,2],[1,4,1],[1,2,1],[1,5,1],[1,24,3],[1,11,4],[1,38,2],[1,13,3],[1,7,3],[1,1,1],[1,1,2],[1,3,3],[1,5,3],[1,6,1],[1,7,1],[1,3,1],[1,4,2],[1,3,1],[1,3,1],[1,1,2],[1,2,1],[1,18,8],[1,1,3],[1,1,1],[1,2,5],[1,13,9],[1,2,2],[1,6,1],[1,5,1],[1,13,3],[1,7,1],[1,3,2],[1,2,1],[1,4,1],[1,2,2],[1,6,2],[1,4,3],[1,1,3],[1,3,2],[1,12,8],[1,6,1],[1,7,1],[1,6,3],[1,9,4],[1,16,17],[1,1,2],[1,4,1],[1,2,1],[1,2,1],[1,2,1],[1,1,1],[1,4,2],[1,4,1],[1,8,1],[1,14,17],[1,7,1],[1,7,6],[1,5,1],[1,4,2],[1,80,2],[1,13,1],[1,11,1],[1,9,1],[1,2,4],[1,3,1],[1,2,1],[1,5,2],[1,3,1],[1,1,2],[1,12,1],[1,8,5],[1,6,3],[1,17,1],[1,3,4],[1,1,2],[1,5,2],[1,1,3],[1,2,2],[1,2,3],[1,2,1],[1,4,1],[1,1,1],[1,14,1],[1,2,1],[1
,16,4],[1,15,2],[1,3,3],[1,8,8],[1,6,1],[1,25,4],[1,6,1],[1,7,3],[1,36,2],[1,2,1],[1,32,2],[1,1,1],[1,7,1],[1,14,2],[1,21,1],[1,3,1],[1,27,7],[1,6,3],[1,1,5],[1,5,4],[1,12,2],[1,2,1],[1,2,1],[1,8,7],[1,8,8],[1,7,1],[1,2,1],[1,4,1],[1,1,7],[1,10,3],[1,17,1],[1,1,1],[1,8,6],[1,29,5],[1,12,2],[1,7,2],[1,7,1],[1,2,2],[1,2,1],[1,2,1],[1,54,9],[1,1,1],[1,12,2],[1,8,1],[1,8,4],[1,39,1],[1,3,3],[1,9,4],[1,6,5],[1,2,1],[1,15,2],[1,18,1],[1,2,2],[1,1,1],[1,1,1],[1,2,4],[1,3,1],[1,6,1],[1,3,3],[1,4,3],[1,3,2],[1,1,1],[1,2,2],[1,16,12],[1,4,2],[1,15,2],[1,6,1],[1,7,1],[1,9,8],[1,70,2],[1,5,1],[1,4,3],[1,24,4],[1,8,6],[1,18,43],[1,23,3],[1,10,1],[1,14,8],[1,6,4],[1,2,1],[1,2,1],[1,1,1],[1,2,1],[1,9,3],[1,6,4],[1,5,3],[1,43,2],[1,5,1],[1,11,1],[1,1,2],[1,5,3],[1,4,2],[1,16,2],[1,16,10],[1,5,1],[1,2,2],[1,2,1],[1,2,3],[1,4,6],[1,3,12],[1,6,1],[1,10,1],[1,1,2],[1,13,1],[1,3,1],[1,5,2],[1,6,1],[1,3,1],[1,2,1],[1,1,1],[1,13,1],[1,20,1],[1,20,2],[1,8,1],[1,5,2],[1,2,2],[1,10,5],[1,1,3],[1,7,2],[1,4,1],[1,15,18],[1,1,4],[1,5,2],[1,4,1],[1,1,11],[1,1,3],[1,4,1],[1,1,1],[1,2,1],[1,2,12],[1,5,1],[1,3,1],[1,25,2],[1,16,1],[1,10,1],[1,18,1],[1,28,3],[1,5,6],[1,4,2],[1,2,2],[1,51,124],[1,4,2],[1,5,1],[1,28,1],[1,4,5],[1,6,2],[1,20,1],[1,7,1],[1,5,3],[1,11,1],[1,4,3],[1,1,1],[1,6,3],[1,5,1],[1,3,1],[1,10,2],[1,64,5],[1,12,12],[1,5,2],[1,6,1],[1,8,2],[1,28,8],[1,19,1],[1,2,1],[1,1,1],[2,6,1],[2,2,2],[2,4,5],[2,11,1],[2,4,1],[2,4,1],[2,14,1],[2,19,2],[2,2,1],[2,6,4],[2,2,1],[2,6,2],[2,4,1],[2,12,2],[2,15,2],[2,5,1],[2,11,1],[2,11,1],[2,2,2],[2,3,3],[2,5,9],[2,2,1],[2,1,1],[2,1,4],[2,2,1],[2,4,1],[2,11,1],[2,6,1],[2,2,2],[2,8,1],[2,81,7],[2,8,1],[2,5,1],[2,6,3],[2,2,2],[2,39,1],[2,5,2],[2,5,2],[2,2,4],[2,10,2],[2,4,2],[2,2,1],[2,6,6],[2,8,2],[2,56,1],[2,9,1],[2,1,1],[2,16,3],[2,5,2],[2,3,2],[2,12,25],[2,4,4],[2,6,2],[2,7,1],[2,30,11],[2,4,1],[2,16,5],[2,8,2],[2,7,2],[2,11,1],[2,7,1],[2,2,1],[2,1,1],[2,2,9],[2,39,6],[2,2,1],[2,2,1],[2,7,1],[2,19,1],[2,11,2],[2,8,2],[2,4,7],[2,2,1],[2,7,1],[2,1,1],[2,4,1],[2,6,1],[2,6,1],[2,2,4],[2,26,37],[2,2,1],[2,13,2],[2,35,10],[2,13,1],[2,6,1],[2,10,2],[2,19,9],[2,7,1],[2,7,1],[2,2,2],[2,1,1],[2,5,2],[2,10,2],[2,6,1],[2,6,1],[2,6,1],[2,2,2],[2,1,1],[2,6,60],[2,8,1],[2,18,1],[2,4,2],[2,1,1],[2,1,1],[2,2,3],[2,21,2],[2,7,2],[2,11,3],[2,14,2],[2,3,2],[2,12,1],[2,1,2],[2,34,1],[2,1,1],[2,16,1],[2,1,1],[2,11,1],[2,14,1],[2,8,1],[2,9,1],[2,8,1],[2,3,1],[2,4,4],[2,4,1],[2,44,3],[2,4,1],[2,19,6],[2,19,2],[2,3,2],[2,17,2],[2,17,4],[2,1,6],[2,5,3],[2,27,6],[2,5,3],[2,6,3],[2,22,2],[2,22,3],[2,13,19],[2,8,1],[2,2,2],[2,7,1],[2,9,3],[2,2,1],[2,11,1],[2,8,1],[2,4,1],[2,8,2],[2,4,1],[2,1,1],[2,16,1],[2,2,1],[2,4,1],[2,9,11],[2,3,3],[2,3,1],[2,1,2],[2,3,1],[2,28,1],[2,8,5],[2,6,2],[2,8,1],[2,1,1],[2,10,1],[2,6,1],[2,55,1],[2,1,1],[2,4,2],[2,3,2],[2,16,4],[2,11,1],[2,2,3],[2,15,1],[2,1,10],[2,8,2],[2,15,1],[2,1,1],[2,7,114],[2,10,3],[2,1,1],[2,5,1],[2,3,3],[2,2,1],[2,1,1],[2,8,1],[2,96,1],[2,10,3],[2,3,2],[2,2,1],[2,1,1],[2,3,1],[2,25,2],[2,3,1],[2,12,4],[2,2,9],[2,3,1],[2,2,1],[2,9,1],[2,12,1],[2,18,1],[2,23,6],[2,9,85],[2,2,8],[2,1,2],[2,26,1],[2,8,2],[2,6,3],[2,1,4],[2,6,1],[2,8,3],[2,9,2],[2,1,1],[2,7,1],[2,1,3],[2,7,1],[2,3,2],[2,10,1],[2,2,2],[2,8,2],[2,4,4],[2,23,2],[2,8,5],[2,1,1],[2,3,3],[2,7,2],[2,1,1],[2,2,1],[2,1,7],[2,10,1],[2,18,1],[2,39,5],[2,13,2],[2,7,2],[2,6,2],[2,9,1],[2,5,1],[2,7,1],[2,35,2],[2,2,2],[2,5,2],[2,1,1],[2,9,2],[2,18,1],[2,2,3],[2,35,1],[2,6,5],[2,2,2],[2,2,1],[2,12,2],[2,1,1],[2,10,1],[2,6,1],[2,2,1],[2,15,2],[2,7,1],[2,5,4],[2,4,1],[2,2,14],[2,2,1],[2,5,3],[2,21,2],
[2,10,1],[2,2,1],[2,8,1],[2,16,1],[2,9,2],[2,11,2],[2,1,6],[2,12,2],[2,18,2],[2,2,4],[2,4,3],[2,7,11],[2,3,1],[2,28,5],[2,1,4],[2,8,1],[2,2,5],[2,2,1],[2,3,1],[2,10,2],[2,3,3],[2,2,1],[2,17,1],[2,6,1],[2,16,1],[2,10,16],[2,17,1],[2,4,2],[2,1,1],[2,3,3],[2,7,3],[2,5,1],[2,11,1],[2,13,1],[2,3,1],[2,6,1],[2,5,2],[2,17,2],[2,33,13],[2,2,10],[2,3,5],[2,4,3],[2,5,1],[2,2,4],[2,8,2],[2,14,1],[2,16,1],[2,2,3],[2,19,6],[2,5,1],[2,8,2],[2,7,1],[2,1,1],[2,11,1],[2,2,2],[2,11,10],[2,10,1],[2,14,1],[2,1,7],[2,10,1],[2,34,1],[2,2,1],[2,2,4],[2,9,2],[2,16,1],[2,2,4],[2,8,3],[2,1,2],[2,3,5],[2,13,5],[2,20,1],[2,25,8],[2,9,1],[2,1,1],[2,15,3],[2,6,2],[2,394,278],[2,11,2],[2,1,1],[2,3,15],[2,4,2],[2,3,6],[2,6,3],[2,1,12],[2,2,1],[2,1,3],[2,11,2],[2,20,3],[2,31,9],[2,25,7],[2,15,2],[2,11,31],[2,17,2],[2,5,1],[2,2,2],[2,4,1],[2,6,2],[2,27,2],[2,10,2],[2,1,2],[2,26,5],[2,5,14],[2,12,2],[2,5,2],[2,2,1],[2,2,3],[2,6,1],[2,1,3],[2,9,3],[2,18,1],[2,5,5],[2,29,13],[2,14,1],[2,1,4],[2,3,1],[2,5,1],[2,19,4],[2,11,7],[2,8,3],[2,18,1],[2,3,5],[2,11,1],[2,4,1],[2,10,4],[2,19,2],[2,10,3],[2,12,2],[2,19,9],[2,73,3],[2,13,3],[2,12,1],[2,4,5],[2,55,1],[2,6,6],[2,27,2],[2,2,1],[2,20,1],[2,8,1],[2,1,1],[2,29,2],[2,10,8],[2,5,2],[2,10,2],[2,14,1],[2,10,1],[2,1,1],[2,4,2],[2,5,1],[2,1,4],[2,4,2],[2,9,1],[2,9,4],[2,2,1],[2,4,1],[2,6,2],[2,2,2],[2,10,15],[2,17,1],[2,9,1],[2,9,1],[2,8,2],[2,4,1],[2,4,1],[2,243,2],[2,9,3],[2,12,2],[2,4,3],[2,2,1],[2,1,2],[2,57,4],[2,7,2],[2,8,2],[2,14,2],[2,2,1],[2,6,1],[2,7,2],[2,8,1],[2,4,3],[2,36,5],[2,3,1],[2,1,1],[2,45,8],[2,1,1],[2,2,3],[2,9,1],[2,1,1],[2,13,2],[2,44,6],[2,2,1],[2,36,1],[2,4,1],[2,5,1],[2,3,2],[2,1,1],[2,28,2],[2,9,1],[2,3,3],[2,10,2],[2,16,1],[2,1,1],[2,1,1],[2,13,1],[2,14,3],[2,65,1],[2,7,1],[2,2,1],[2,11,8],[2,4,1],[2,17,1],[2,6,1],[2,15,5],[2,15,1],[2,17,2],[2,8,1],[2,8,1],[2,1,2],[2,5,7],[2,1,1],[2,3,2],[2,2,1],[2,4,1],[2,32,1],[2,3,1],[2,1,1],[2,1,1],[2,2,2],[2,2,1],[2,8,2],[2,11,3],[2,2,3],[2,42,3],[2,5,1],[2,6,2],[2,1,1],[2,9,1],[2,2,2],[2,5,1],[2,2,1],[2,7,1],[2,7,6],[2,6,2],[2,3,1],[2,1,3],[2,15,1],[2,23,1],[2,1,1],[2,3,1],[2,4,2],[2,8,1],[2,2,7],[2,3,4],[2,6,5],[2,4,1],[2,5,3],[2,16,5],[2,11,1],[2,13,1],[2,22,3],[2,10,5],[2,2,2],[2,2,2],[2,6,1],[2,7,1],[2,4,2],[2,4,3],[2,7,3],[2,7,4],[2,1,1],[2,71,9],[2,4,8],[2,33,4],[2,16,2],[2,1,18],[2,15,1],[2,3,1],[2,8,1],[2,6,3],[2,4,2],[2,1,1],[2,7,2],[2,2,8],[2,2,1],[2,8,1],[2,1,3],[2,5,1],[2,2,2],[2,11,1],[2,17,3],[2,118,1],[2,8,4],[2,14,1],[2,3,4],[2,14,1],[2,2,2],[2,4,3],[2,2,1],[2,11,1],[2,8,10],[2,1,2],[2,3,3],[2,2,2],[2,12,1],[2,2,2],[2,26,3],[2,3,2],[2,3,3],[2,19,1],[2,1,13],[2,23,2],[2,3,1],[2,7,4],[2,10,4],[2,2,3],[2,71,3],[2,3,3],[2,23,1],[2,1,1],[2,34,3],[2,62,1],[2,4,1],[2,7,2],[2,2,8],[2,6,1],[2,20,3],[2,26,2],[2,5,2],[2,2,1],[2,7,1],[2,1,1],[2,7,2],[2,28,7],[2,4,1],[2,2,2],[2,4,1],[2,7,1],[2,2,3],[2,3,1],[2,8,3],[2,43,1],[2,2,1],[2,1,4],[2,2,1],[2,13,3],[2,4,2],[2,6,1],[2,17,1],[2,2,8],[2,32,1],[2,11,2],[2,5,2],[2,45,3],[2,9,1],[2,14,2],[2,9,1],[2,2,1],[2,10,5],[2,2,1],[2,13,1],[2,2,2],[2,3,5],[2,2,1],[2,17,3],[2,11,1],[2,15,1],[2,13,4],[2,7,7],[2,10,2],[2,6,4],[2,2,3],[2,1,3],[2,27,2],[2,2,3],[2,2,1],[2,3,1],[2,3,9],[2,3,46],[2,11,1],[2,30,1],[2,5,1],[2,8,8],[2,2,1],[2,1,1],[2,2,1],[2,6,7],[2,1,1],[2,4,1],[2,4,2],[2,15,2],[2,6,7],[2,4,2],[2,5,1],[2,1,4],[2,2,3],[2,1,2],[2,2,2],[2,1,7],[2,15,2],[2,18,3],[2,2,1],[2,6,1],[2,8,1],[2,134,20],[2,26,1],[2,2,2],[2,8,4],[2,1,1],[2,3,1],[2,14,1],[2,3,1],[2,26,1],[2,19,1],[2,1,1],[2,1,1],[2,7,1],[2,5,2],[2,5,8],[2,3,4],[2,1,1],[2,2,2],[2,16,1],[2,7,2],[2,6,1],[2,1,6],[2,4
,3],[2,2,2],[2,2,2],[2,2,1],[2,2,1],[2,1,2],[2,8,3],[2,4,1],[2,9,1],[2,18,33],[2,14,1],[2,1,1],[2,3,2],[2,7,1],[2,14,4],[2,4,2],[2,31,7],[2,19,2],[2,11,4],[2,2,1],[2,7,2],[2,2,1],[2,2,3],[2,52,4],[2,4,1],[2,1,1],[2,4,3],[2,11,1],[2,3,2],[2,6,1],[2,10,3],[2,6,1],[2,12,1],[2,10,2],[2,4,2],[2,23,2],[2,3,3],[2,8,1],[2,21,6],[2,2,2],[2,1,1],[2,1,1],[2,16,3],[2,9,2],[2,5,1],[2,2,2],[2,1,4],[2,4,1],[2,1,25],[2,24,2],[2,6,1],[2,3,4],[2,10,4],[2,6,2],[2,35,2],[2,2,2],[2,1,1],[2,25,10],[2,8,1],[2,1,2],[2,1,1],[2,2,1],[2,3,8],[2,2,1],[2,2,1],[2,5,2],[2,4,3],[2,2,8],[2,1,1],[2,4,2],[2,3,3],[2,12,1],[2,3,2],[2,4,1],[2,2,4],[2,7,2],[2,1,1],[2,73,14],[2,90,1],[2,4,1],[2,2,1],[2,1,1],[2,6,3],[2,1,1],[2,4,1],[2,10,3],[2,2,3],[2,1,1],[2,6,1],[2,37,2],[2,10,1],[2,2,2],[2,60,2],[2,16,3],[2,6,1],[2,1,1],[2,3,4],[2,38,5],[2,6,2],[2,2,1],[2,2,1],[2,9,2],[2,11,1],[2,6,1],[2,9,1],[2,2,2],[2,4,3],[2,8,1],[2,3,2],[2,1,9],[2,14,2],[2,8,1],[2,30,4],[2,2,1],[2,31,2],[2,31,1],[2,21,23],[2,1,5],[2,4,1],[2,2,1],[2,5,3],[2,4,2],[2,10,2],[2,2,2],[2,18,1],[2,15,1],[2,2,1],[2,1,2],[2,5,1],[2,13,1],[2,14,4],[2,1,4],[2,5,1],[2,109,3],[2,18,2],[2,1,2],[2,164,114],[2,8,1],[2,2,3],[2,4,1],[2,1,1],[2,10,1],[2,9,2],[2,4,3],[2,1,75],[2,6,1],[2,17,2],[2,3,1],[2,9,1],[2,2,1],[2,21,1],[2,30,3],[2,7,2],[2,2,2],[2,63,5],[2,16,3],[2,6,1],[2,2,8],[2,25,2],[2,31,3],[2,126,21],[2,10,1],[2,2,2],[2,14,7],[2,6,10],[2,4,3],[2,7,1],[2,12,1],[2,2,1],[2,3,2],[2,2,15],[2,1,4],[2,4,1],[2,3,1],[2,4,1],[2,6,2],[2,7,3],[2,2,3],[2,9,2],[2,6,1],[2,2,1],[2,16,1],[2,22,2],[2,10,1],[2,10,4],[2,7,2],[2,13,1],[2,3,1],[2,7,2],[2,23,12],[2,3,1],[2,6,1],[2,4,2],[2,29,2],[2,5,3],[2,8,1],[2,1,1],[2,6,1],[2,3,1],[2,17,2],[2,15,1],[2,2,1],[2,6,1],[2,2,2],[2,30,1],[2,3,1],[2,2,2],[2,2,5],[2,2,1],[2,37,5],[2,6,2],[2,7,6],[2,2,3],[2,3,3],[2,2,5],[2,75,6],[2,2,3],[2,10,1],[2,2,3],[2,7,2],[2,30,1],[2,12,33],[2,1,1],[2,3,4],[2,14,1],[2,9,2],[2,8,1],[2,1,1],[2,9,1],[2,4,1],[2,2,1],[2,7,1],[2,4,1],[2,3,1],[2,4,3],[2,1,1],[2,5,2],[2,3,4],[2,4,2],[2,6,3],[2,13,5],[2,4,2],[2,6,1],[2,2,5],[2,2,3],[2,1,1],[2,14,1],[2,5,1],[2,4,2],[2,9,1],[2,7,6],[2,4,1],[2,19,2],[2,23,1],[2,20,7],[2,9,1],[2,4,1],[2,12,2],[2,9,4],[2,3,2],[2,3,7],[2,3,1],[2,10,2],[2,6,1],[2,7,1],[2,1,1],[2,9,1],[2,6,1],[2,1,1],[2,17,2],[2,9,1],[2,5,2],[2,1,1],[2,11,2],[2,9,1],[2,1,1],[2,3,6],[2,2,1],[2,5,9],[2,12,2],[2,2,1],[2,6,2],[2,17,4],[2,2,2],[2,7,1],[2,596,5],[2,6,1],[2,2,1],[2,58,125],[2,6,1],[2,8,1],[2,2,1],[2,3,1],[2,1,2],[2,11,4],[2,1,1],[2,9,6],[2,2,8],[2,1,1],[2,6,2],[2,1,1],[2,2,1],[2,7,2],[2,7,3],[2,14,2],[2,1,1],[2,18,9],[2,2,5],[2,2,12],[2,8,4],[2,6,4],[2,3,1],[2,19,2],[2,4,1],[2,2,1],[2,4,3],[2,3,1],[2,13,1],[2,1,1],[2,7,1],[2,1,1],[2,8,1],[2,13,14],[2,11,1],[2,31,1],[2,4,1],[2,6,1],[2,3,2],[2,26,1],[2,4,2],[2,1,1],[2,2,2],[2,1,2],[2,1,1],[2,7,1],[2,8,1],[2,6,2],[2,19,13],[2,2,3],[2,8,3],[2,1,6],[2,5,1],[2,1,1],[2,6,1],[2,9,1],[2,2,2],[2,35,1],[2,1,1],[2,27,2],[2,54,2],[2,6,2],[2,5,1],[2,2,1],[2,2,4],[2,2,1],[2,2,1],[2,14,1],[2,9,1],[2,53,17],[2,2,1],[2,10,1],[2,9,1],[2,23,1],[2,7,1],[2,12,4],[2,1,2],[2,8,1],[2,7,4],[2,2,1],[2,2,1],[2,3,1],[2,11,1],[2,2,2],[2,6,1],[2,2,1],[2,18,4],[2,3,4],[2,8,2],[2,13,1],[2,2,1],[2,1,2],[2,14,4],[2,8,11],[2,1,1],[2,8,3],[2,7,3],[2,90,1],[2,20,2],[2,16,1],[2,20,2],[2,3,1],[2,8,10],[2,10,1],[2,10,1],[2,1,1],[2,3,1],[2,5,1],[2,37,3],[2,24,3],[2,10,1],[2,3,1],[2,2,4],[2,4,1],[2,19,2],[2,1,1],[2,5,1],[2,8,1],[2,3,1],[2,1,1],[2,2,1],[2,2,32],[2,2,1],[2,4,1],[2,1,1],[2,2,2],[2,5,1],[2,2,3],[2,25,9],[2,2,1],[2,4,4],[2,2,1],[2,15,1],[2,59,1],[2,3,2],[2,4,1],[2,9,2],[2,3,10]
,[2,6,1],[2,5,5],[2,8,2],[2,2,2],[2,4,2],[2,10,1],[2,126,1],[2,3,1],[2,8,1],[2,9,2],[2,1,30],[2,25,1],[2,7,3],[2,2,2],[2,1,3],[2,21,1],[2,38,1],[2,48,1],[2,22,1],[2,4,2],[2,55,2],[2,5,1],[2,15,1],[2,14,44],[2,4,1],[2,1,2],[2,2,3],[2,2,1],[2,3,3],[2,6,1],[2,2,1],[2,26,7],[2,4,1],[2,1,2],[2,3,2],[2,6,2],[2,10,1],[2,18,3],[2,2,1],[2,38,2],[2,1,1],[2,8,1],[2,8,1],[2,3,1],[2,4,1],[2,1,1],[2,1,2],[2,4,1],[2,26,2],[2,3,3],[2,2,1],[2,6,1],[2,19,1],[2,3,4],[2,2,1],[2,4,1],[2,11,1],[2,9,1],[2,9,1],[2,9,1],[2,1,1],[2,1,1],[2,7,1],[2,2,1],[2,11,4],[2,10,2],[2,4,1],[2,6,1],[2,4,1],[2,8,1],[2,11,1],[2,1,1],[2,7,1],[2,8,2],[2,9,1],[2,8,1],[2,41,2],[2,2,4],[2,1,6],[2,2,1],[2,6,3],[2,128,5],[2,2,1],[2,13,13],[2,6,1],[2,1,3],[2,3,3],[2,7,2],[2,10,12],[2,2,1],[2,8,1],[2,1,1],[2,7,1],[2,2,1],[2,10,2],[2,11,10],[2,1,1],[2,8,3],[2,4,5],[2,2,1],[2,14,2],[2,4,1],[2,4,1],[2,7,1],[2,6,1],[2,7,3],[2,1,1],[2,2,1],[2,7,2],[2,2,1],[2,6,1],[2,8,1],[2,2,4],[2,6,1],[2,43,1],[2,108,3],[2,8,1],[2,13,1],[2,4,1],[2,10,3],[2,2,1],[2,24,2],[2,1,2],[2,4,2],[2,2,2],[2,40,6],[2,6,2],[2,6,2],[2,4,3],[2,28,5],[2,4,1],[2,15,1],[2,12,1],[2,1,1],[2,27,1],[3,1,1],[3,5,2],[3,16,2],[3,16,3],[3,1,2],[3,98,2],[3,91,7],[3,6,37],[3,4,1],[3,9,1],[3,97,2],[3,6,1],[3,23,3],[3,115,1],[3,2,1],[3,1,1],[3,1,1],[3,14,4],[3,1,1],[3,28,1],[3,1,1],[3,6,1],[3,15,5],[3,3,1],[3,52,1],[3,2,3],[3,3,1],[3,4,5],[3,13,1],[3,16,3],[3,13,1],[3,17,1],[3,4,4],[3,6,7],[3,14,1],[3,32,1],[3,3,3],[3,11,4],[3,1,1],[3,8,6],[3,9,7],[3,2,1],[3,9,2],[3,5,2],[3,26,12],[3,11,3],[3,12,2],[3,4,2],[3,6,2],[3,30,6],[3,1,2],[3,10,1],[3,1,1],[3,4,1],[3,7,1],[3,30,29],[3,2,3],[3,2,2],[3,2,1],[3,11,1],[3,2,3],[3,3,1],[3,9,1],[3,2,2],[3,5,1],[3,1,2],[3,1,13],[3,6,9],[3,1,1],[3,6,2],[3,1,3],[3,4,1],[3,6,1],[3,9,3],[3,1,1],[3,9,2],[3,19,45],[3,2,1],[3,7,8],[3,21,3],[3,6,2],[3,2,1],[3,6,1],[3,5,1],[3,2,1],[3,15,7],[3,2,1],[3,9,3],[3,11,1],[3,4,1],[3,7,1],[3,2,1],[3,19,1],[3,5,1],[3,2,1],[3,1,1],[3,22,3],[3,21,5],[3,13,1],[3,2,1],[3,4,1],[3,23,1],[3,8,1],[3,3,2],[3,2,2],[3,4,1],[3,12,2],[3,5,2],[3,16,8],[3,6,1],[3,1,2],[3,2,1],[3,7,1],[3,6,1],[3,6,3],[3,45,1],[3,4,5],[3,1,2],[3,3,1],[3,2,1],[3,1,1],[3,12,1],[3,8,1],[3,3,1],[3,6,1],[3,2,2],[3,9,2],[3,5,2],[3,2,1],[3,3,1],[3,15,1],[3,11,1],[3,4,1],[3,9,2],[3,3,1],[3,4,1],[3,1,3],[3,6,15],[3,6,3],[3,2,6],[3,1,3],[3,3,2],[3,15,1],[3,6,1],[3,7,1],[3,5,1],[3,9,1],[3,49,2],[3,5,2],[3,9,4],[3,39,1],[3,4,3],[3,1,5],[3,1,2],[3,2,1],[3,14,2],[3,4,3],[3,18,1],[3,5,4],[3,19,3],[3,3,1],[3,2,1],[3,3,2],[3,48,10],[3,1,1],[3,5,6],[3,12,3],[3,1,2],[3,5,4],[3,4,1],[3,4,1],[3,5,1],[3,1,1],[3,10,1],[3,10,2],[3,6,3],[3,2,7],[3,4,1],[3,9,2],[3,1,1],[3,2,1],[3,4,6],[3,1,1],[3,25,9],[3,11,1],[3,2,1],[3,8,2],[3,1,1],[3,9,3],[3,4,6],[3,1,7],[3,1,1],[3,4,1],[3,11,2],[3,14,1],[3,65,2],[3,6,1],[3,5,2],[3,2,2],[3,13,1],[3,2,5],[3,2,1],[3,4,2],[3,25,1],[3,2,1],[3,2,3],[3,9,1],[3,5,5],[3,46,1],[3,6,2],[3,12,9],[3,4,4],[3,2,3],[3,13,5],[3,39,16],[3,3,1],[3,1,2],[3,68,14],[3,5,1],[3,11,1],[3,7,1],[3,4,1],[3,53,11],[3,4,3],[3,4,1],[3,2,1],[3,4,1],[3,1,1],[3,1,2],[3,8,4],[3,5,1],[3,6,5],[3,6,13],[3,403,3],[3,23,1],[3,3,3],[3,14,1],[3,10,1],[3,3,2],[3,46,11],[3,4,3],[3,29,1],[3,41,2],[3,11,1],[3,15,3],[3,11,2],[3,6,1],[3,3,1],[3,17,2],[3,14,3],[3,5,4],[3,2,1],[3,2,1],[3,5,6],[3,6,1],[3,54,2],[3,2,1],[3,4,2],[3,1,1],[3,7,1],[3,8,34],[3,7,1],[3,1,2],[3,3,2],[3,2,5],[3,1,1],[3,15,12],[3,13,1],[3,5,1],[3,1,1],[3,5,1],[3,39,1],[3,26,9],[3,11,1],[3,6,1],[3,2,1],[3,19,4],[3,4,5],[3,10,1],[3,11,6],[3,4,1],[3,38,1],[3,1,1],[3,1,3],[3,2,1],[3,5,10],[3,4,1],[3,18,2],[3,4,1],[3,19,1],[3,1
,1],[3,8,6],[3,1,1],[3,9,1],[3,8,3],[3,15,4],[3,9,3],[3,13,1],[3,10,1],[3,1,2],[3,5,4],[3,4,2],[3,4,1],[3,28,1],[3,6,2],[3,9,1],[3,1,2],[3,2,2],[3,25,1],[3,5,8],[3,5,3],[3,8,2],[3,2,1],[3,14,5],[3,2,1],[3,11,3],[3,10,1],[3,2,2],[3,1,1],[3,3,1],[3,9,1],[3,39,9],[3,27,2],[3,1,1],[3,1,3],[3,12,3],[3,6,1],[3,14,2],[3,17,3],[3,198,1],[3,3,1],[3,5,1],[3,1,1],[3,2,4],[3,12,1],[3,31,1],[3,8,14],[3,25,2],[3,16,2],[3,18,2],[3,2,3],[3,2,3],[3,6,28],[3,22,3],[3,6,1],[3,8,2],[3,4,3],[3,3,3],[3,8,1],[3,1,1],[3,1,2],[3,1,1],[3,1,1],[3,1,2],[3,6,2],[3,2,3],[3,4,1],[3,3,1],[3,1,1],[3,3,2],[3,8,10],[3,6,1],[3,2,1],[3,2,1],[3,5,1],[3,29,6],[3,10,1],[3,3,8],[3,1,3],[3,2,2],[3,3,1],[3,3,4],[3,5,19],[3,15,1],[3,65,1],[3,2,2],[3,60,3],[3,52,1],[3,1,1],[3,4,2],[3,4,1],[3,6,1],[3,7,4],[3,1,1],[3,13,1],[3,8,3],[3,13,1],[3,6,1],[3,3,2],[3,14,1],[3,2,2],[3,4,1],[3,1,1],[3,11,29],[3,7,1],[3,21,6],[3,4,1],[3,1,1],[3,2,1],[3,9,1],[3,2,4],[3,3,1],[3,2,3],[3,1,2],[3,3,2],[3,3,4],[3,16,2],[3,9,2],[3,2,1],[3,17,8],[3,9,4],[3,7,1],[3,6,4],[3,1,2],[3,2,1],[3,4,4],[3,2,1],[3,3,1],[3,3,1],[3,11,1],[3,2,2],[3,2,1],[3,2,3],[3,2,2],[3,10,6],[3,10,4],[3,1,1],[3,8,3],[3,29,2],[3,7,1],[3,2,1],[3,4,1],[3,11,1],[3,2,1],[3,2,2],[3,13,3],[3,4,1],[3,3,1],[3,2,4],[3,18,1],[3,12,1],[3,6,3],[3,3,1],[3,5,1],[3,3,2],[3,9,2],[3,5,1],[3,5,1],[3,11,1],[3,1,1],[3,39,18],[3,3,2],[3,4,1],[3,17,2],[3,14,2],[3,10,6],[3,1,1],[3,4,5],[3,2,1],[3,4,6],[3,12,1],[3,106,80],[3,32,1],[3,7,1],[3,8,1],[3,2,1],[3,33,2],[3,33,7],[3,10,1],[3,3,2],[3,4,3],[3,16,3],[3,7,1],[3,8,1],[3,16,1],[3,8,1],[3,8,1],[3,30,1],[3,7,1],[3,2,1],[3,3,10],[3,27,1],[3,2,1],[3,1,3],[3,2,1],[3,23,1],[3,1,1],[3,5,2],[3,6,1],[3,2,1],[3,2,13],[3,1,3],[3,6,2],[3,5,1],[3,26,1],[3,4,5],[3,2,1],[3,9,1],[3,6,1],[3,2,1],[3,21,2],[3,15,1],[3,4,2],[3,2,1],[3,30,1],[3,4,2],[3,2,1],[3,2,58],[3,8,2],[3,13,1],[3,16,2],[3,10,6],[3,6,1],[3,6,1],[3,2,6],[3,1,1],[3,2,4],[3,11,9],[3,25,2],[3,4,2],[3,1,1],[3,9,9],[3,1,9],[3,3,3],[3,4,1],[3,2,3],[3,5,2],[3,2,7],[3,2,1],[3,2,1],[3,6,3],[3,3,4],[3,1,2],[3,4,3],[3,7,118],[3,7,1],[3,6,1],[3,3,1],[3,1,15],[3,1,2],[3,4,2],[3,2,1],[3,4,1],[3,6,1],[3,23,1],[3,1,1],[3,3,1],[3,4,1],[3,10,3],[3,2,2],[3,6,5],[3,8,1],[3,3,1],[3,4,1],[3,20,2],[3,14,2],[3,7,1],[3,21,29],[3,10,2],[3,10,2],[3,3,3],[3,2,1],[3,3,2],[3,24,3],[3,3,1],[3,9,1],[3,6,1],[3,22,1],[3,13,1],[3,5,2],[3,1,1],[3,9,1],[3,10,2],[3,4,1],[3,7,1],[3,2,1],[3,12,4],[3,48,2],[3,43,1],[3,6,1],[3,1,1],[3,4,1],[3,14,10],[3,2,1],[3,1,1],[3,1,1],[3,3,1],[3,11,5],[3,36,1],[3,4,49],[3,11,1],[3,8,1],[3,2,2],[3,3,1],[3,3,1],[3,8,3],[3,15,8],[3,30,9],[3,23,5],[3,10,1],[3,7,6],[3,1,1],[3,9,2],[3,6,1],[3,3,1],[3,3,1],[3,2,1],[3,21,1],[3,13,2],[3,4,2],[3,9,2],[3,8,1],[3,2,2],[3,4,2],[3,1,1],[3,9,2],[3,32,2],[3,2,2],[3,10,1],[3,1,4],[3,4,3],[3,14,3],[3,5,2],[3,2,1],[3,3,1],[3,5,3],[3,14,3],[3,2,3],[3,6,1],[3,4,1],[3,1,1],[3,16,1],[3,3,1],[3,2,1],[3,5,1],[3,33,1],[3,3,1],[3,14,4],[3,8,3],[3,12,2],[3,14,1],[3,2,1],[3,1,1],[3,13,2],[3,8,1],[3,9,1],[3,17,1],[3,14,2],[3,16,1],[3,12,4],[3,2,1],[3,2,2],[3,20,1],[3,2,2],[3,8,4],[3,7,3],[3,8,1],[3,1,2],[3,5,5],[3,29,1],[3,1,1],[3,2,1],[3,8,2],[3,2,1],[3,7,9],[3,3,2],[3,7,1],[3,6,1],[3,6,2],[3,1,26],[3,3,3],[3,7,1],[3,2,2],[3,8,2],[3,7,1],[3,3,1],[3,4,4],[3,11,1],[3,5,15],[3,28,1],[3,3,8],[3,3,3],[3,2,4],[3,6,4],[3,3,2],[3,2,2],[3,5,1],[3,12,2],[3,10,2],[3,1,1],[3,6,1],[3,2,1],[3,3,2],[4,8,1],[4,3,1],[4,23,1],[4,4,9],[4,6,2],[4,9,1],[4,9,6],[4,5,9],[4,8,1],[4,2,1],[4,2,3],[4,8,1],[4,1,1],[4,4,1],[4,8,1],[4,2,1],[4,16,1],[4,1,8],[4,4,1],[4,1,3],[4,18,1],[4,2,1],[4,4,9],[4,2,1],[4,3,1],[4
,9,2],[4,2,1],[4,7,3],[4,5,4],[4,27,2],[4,1,1],[4,8,2],[4,7,1],[4,8,1],[4,9,4],[4,3,2],[4,6,4],[4,2,2],[4,13,5],[4,8,1],[4,10,2],[4,1,1],[4,2,1],[4,1,2],[4,6,2],[4,5,2],[4,8,2],[4,16,2],[4,7,2],[4,102,5],[4,2,2],[4,1,1],[4,2,1],[4,1,2],[4,2,1],[4,29,4],[4,2,1],[4,1,1],[4,1,4],[4,3,2],[4,6,1],[4,19,2],[4,4,3],[4,1,12],[4,1,1],[4,62,3],[4,14,1],[4,1,1],[4,1,1],[4,7,4],[4,9,1],[4,15,1],[4,16,15],[4,2,2],[4,2,1],[4,41,3],[4,7,8],[4,7,3],[4,5,1],[4,9,1],[4,6,1],[4,1,3],[4,15,1],[4,5,4],[4,28,2],[4,11,3],[4,15,1],[4,1,1],[4,1,1],[4,12,1],[4,16,4],[4,12,5],[4,5,2],[4,8,4],[4,124,115],[4,11,3],[4,46,10],[4,4,1],[4,3,1],[4,2,1],[4,27,1],[4,1,1],[4,20,1],[4,2,1],[4,4,1],[4,53,1],[4,18,1],[4,1,1],[4,8,2],[4,3,1],[4,2,1],[4,5,1],[4,2,3],[4,2,5],[4,3,1],[4,8,1],[4,2,5],[4,8,2],[4,9,2],[4,48,1],[4,9,1],[4,20,2],[4,4,4],[4,3,2],[4,8,2],[4,6,2],[4,12,6],[4,9,1],[4,3,1],[4,4,1],[4,5,3],[4,5,1],[4,8,4],[4,3,1],[4,7,1],[4,6,2],[4,15,16],[4,6,1],[4,50,4],[4,23,4],[4,9,7],[4,8,2],[4,1,1],[4,2,1],[4,9,1],[4,12,1],[4,4,3],[4,2,2],[4,42,4],[4,1,1],[4,6,1],[4,11,10],[4,6,11],[4,7,1],[4,4,2],[4,4,2],[4,6,1],[4,59,4],[4,1,1],[4,2,7],[4,12,20],[4,11,3],[4,4,1],[4,12,3],[4,6,3],[4,7,2],[4,17,4],[4,106,8],[4,6,2],[4,7,1],[4,1,1],[4,8,1],[4,4,6],[4,3,1],[4,4,3],[4,14,3],[4,15,2],[4,4,1],[4,44,91],[4,7,2],[4,3,2],[4,2,1],[4,23,2],[4,30,1],[4,2,2],[4,10,1],[4,6,9],[4,6,2],[4,3,2],[4,3,2],[4,20,1],[4,4,1],[4,18,2],[4,12,1],[4,20,14],[4,10,1],[4,3,1],[4,2,1],[4,3,2],[4,3,3],[4,6,3],[4,2,4],[4,8,1],[4,8,5],[4,3,1],[4,10,2],[4,2,1],[4,1,1],[4,10,1],[4,25,2],[4,1,1],[4,4,1],[4,63,2],[4,1,1],[4,4,1],[4,6,7],[4,2,3],[4,8,1],[4,19,2],[4,11,1],[4,30,10],[4,4,4],[4,2,3],[4,2,1],[4,43,29],[4,2,1],[4,1,1],[4,17,1],[4,14,1],[4,13,1],[4,6,4],[4,2,2],[4,1,2],[4,3,1],[4,7,3],[4,4,1],[4,4,1],[4,1,1],[4,13,5],[4,2,1],[4,1,1],[4,5,1],[4,4,2],[4,13,2],[4,10,4],[4,8,1],[4,3,1],[4,2,2],[4,8,3],[4,4,2],[4,6,1],[4,7,1],[4,14,29],[4,19,1],[4,7,1],[4,19,1],[4,24,2],[4,2,1],[4,1,1],[4,28,1],[4,1,1],[4,2,1],[4,3,1],[4,2,1],[4,1,7],[4,2,4],[4,3,1],[4,29,1],[4,2,1],[4,14,1],[4,2,1],[4,28,3],[4,11,3],[4,1,2],[4,21,2],[4,1,1],[4,15,1],[4,17,1],[4,16,1],[4,13,1],[4,2,1],[4,15,5],[4,19,1],[4,17,1],[4,5,3],[4,12,2],[4,33,1],[4,8,1],[4,15,4],[4,2,11],[4,4,1],[4,1,10],[4,39,1],[4,28,1],[4,25,2],[4,1,1],[4,14,2],[4,8,32],[4,9,1],[4,7,1],[4,6,2],[4,1,2],[4,3,1],[4,6,2],[4,12,2],[4,2,2],[4,5,2],[4,18,1],[4,5,3],[4,6,2],[4,25,1],[4,3,16],[4,14,4],[4,2,6],[4,14,2],[4,3,1],[4,4,1],[4,9,3],[4,28,2],[4,9,1],[4,2,1],[4,7,1],[4,2,1],[4,1,4],[4,4,3],[4,1,1],[4,16,6],[4,3,1],[4,10,1],[4,12,3],[4,8,1],[4,4,1],[4,15,2],[4,4,1],[4,2,3],[4,2,9],[4,4,1],[4,7,2],[4,14,1],[4,31,3],[4,13,1],[4,19,2],[4,8,3],[4,2,1],[4,12,1],[4,5,1],[4,45,3],[4,6,1],[4,1,1],[4,12,6],[4,4,3],[4,3,1],[4,5,2],[4,4,4],[4,19,2],[4,8,1],[4,2,1],[4,27,2],[4,73,3],[4,22,2],[4,1,2],[4,7,46],[4,9,2],[4,2,1],[4,524,305],[4,7,1],[4,26,1],[4,2,1],[4,6,1],[4,30,2],[4,6,1],[4,25,92],[4,2,1],[4,13,1],[4,1,4],[4,1,7],[4,6,1],[4,8,2],[4,6,1],[4,4,2],[4,2,6],[4,12,2],[4,2,2],[4,5,2],[4,3,2],[4,13,1],[4,4,1],[4,6,3],[4,14,1],[4,15,1],[4,25,1],[4,3,1],[4,9,4],[4,94,3],[4,11,2],[4,12,4],[4,7,3],[4,3,1],[4,9,2],[4,3,1],[4,2,1],[4,8,3],[4,7,5],[4,2,45],[4,10,1],[4,10,4],[4,5,3],[4,6,6],[5,5,1],[5,2,1],[5,3,3],[5,11,2],[5,28,1],[5,8,1],[5,4,1],[5,4,1],[5,12,1],[5,7,1],[5,1,1],[5,38,7],[5,6,2],[5,4,2],[5,5,1],[5,2,2],[5,2,7],[5,1,4],[5,4,1],[5,4,1],[5,1,2],[5,3,1],[5,7,1],[5,2,1],[5,10,2],[5,4,1],[5,2,1],[5,2,2],[5,3,1],[5,15,78],[5,2,1],[5,1,5],[5,10,1],[5,6,4],[5,10,2],[5,5,1],[5,1,1],[5,1,1],[5,2,2],[5,6,1],[5,2,2],[5,
6,2],[5,10,2],[5,3,1],[5,6,2],[5,4,3],[5,16,5],[5,47,48],[5,2,5],[5,6,7],[5,4,2],[5,3,1],[5,2,1],[5,8,1],[5,7,1],[5,2,2],[5,2,1],[5,3,1],[5,7,4],[5,1,1],[5,1,1],[5,8,6],[5,1,4],[5,9,3],[5,11,4],[5,6,1],[5,6,1],[5,2,1],[5,5,1],[5,84,1],[5,2,33],[5,8,1],[5,6,3],[5,5,3],[5,2,1],[5,10,2],[5,3,1],[5,68,9],[5,6,2],[5,21,11],[5,3,4],[5,3,1],[5,16,3],[5,2,2],[5,2,1],[5,14,2],[5,24,2],[5,19,1],[5,1,4],[5,1,1],[5,3,1],[5,6,1],[5,2,1],[5,5,2],[5,4,3],[5,26,3],[5,2,1],[5,6,4],[5,2,1],[5,6,3],[5,5,1],[5,8,3],[5,1,3],[5,9,1],[5,1,2],[5,11,2],[5,23,1],[5,7,1],[5,2,2],[5,3,2],[5,2,1],[5,11,2],[5,8,2],[5,1,1],[5,4,1],[5,2,1],[5,7,1],[5,11,1],[5,1,1],[5,33,1],[5,4,1],[5,5,1],[5,17,3],[5,1,2],[5,18,2],[5,1,2],[5,1,1],[5,2,3],[5,4,2],[5,2,1],[5,13,7],[5,5,1],[5,19,4],[5,23,9],[5,11,6],[5,7,2],[5,10,1],[5,2,1],[5,26,1],[5,3,3],[5,3,2],[5,3,2],[5,15,3],[5,2,1],[5,3,1],[5,4,1],[5,8,1],[5,4,1],[5,23,1],[5,6,1],[5,1,3],[5,124,17],[5,1,1],[5,1,1],[5,15,1],[5,11,2],[5,2,1],[5,2,2],[5,3,2],[5,1,1],[5,6,4],[5,6,1],[5,3,3],[5,6,5],[5,17,1],[5,7,2],[5,5,1],[5,11,1],[5,3,2],[5,36,2],[5,17,7],[5,4,1],[5,7,2],[5,2,1],[5,2,1],[5,2,1],[5,7,10],[5,4,1],[5,1,3],[5,19,2],[5,2,2],[5,3,1],[5,8,3],[5,4,1],[5,15,1],[5,2,3],[5,13,2],[5,1,3],[5,7,1],[5,23,48],[5,9,1],[5,12,10],[5,16,1],[5,10,1],[5,7,5],[5,2,1],[5,3,1],[5,23,2],[5,4,1],[5,18,1],[5,13,2],[5,54,136],[5,6,2],[5,2,2],[5,5,1],[5,6,1],[5,15,8],[5,14,9],[5,4,1],[5,7,2],[5,3,3],[5,117,5],[5,25,8],[5,14,4],[5,25,3],[5,7,1],[5,7,1],[5,15,3],[5,3,2],[5,4,1],[5,6,4],[5,14,4],[5,7,1],[5,20,1],[5,6,5],[5,12,1],[5,9,3],[5,2,1],[5,4,20],[5,4,3],[5,1,1],[5,1,1],[5,8,1],[5,4,1],[5,1,1],[5,6,3],[5,19,1],[5,14,1],[5,22,2],[5,2,1],[5,11,2],[5,1,1],[5,10,1],[5,4,1],[5,23,3],[5,3,1],[5,15,1],[5,8,4],[5,11,4],[5,4,1],[5,2,1],[5,8,6],[5,2,4],[5,2,7],[5,3,2],[5,2,1],[5,1,1],[5,1,1],[5,11,2],[5,4,10],[5,11,4],[5,110,4],[5,6,1],[5,2,1],[5,96,34],[6,4,1],[6,7,3],[6,2,1],[6,6,2],[6,10,1],[6,2,1],[6,10,1],[6,59,2],[6,7,4],[6,4,2],[6,3,1],[6,6,1],[6,1,4],[6,7,3],[6,2,3],[6,1,1],[6,12,1],[6,1,39],[6,28,1],[6,3,4],[6,8,3],[6,4,4],[6,9,2],[6,15,1],[6,10,1],[6,1,1],[6,2,1],[6,7,1],[6,2,1],[6,93,1],[6,14,6],[6,2,2],[6,55,39],[6,15,2],[6,23,3],[6,3,3],[6,35,2],[6,5,15],[6,1,7],[6,8,19],[6,10,10],[6,3,2],[6,6,3],[6,1,2],[6,6,1],[6,2,1],[6,4,1],[6,127,20],[6,20,18],[6,3,1],[6,9,2],[6,2,3],[6,10,1],[6,27,1],[6,9,1],[6,9,1],[6,28,1],[6,1,1],[6,10,1],[6,11,1],[6,5,1],[6,4,1],[6,82,35],[6,2,1],[6,1,1],[6,3,1],[6,2,1],[6,2,11],[6,2,8],[6,3,2],[6,12,3],[6,5,6],[6,42,4],[6,8,1],[6,2,1],[6,2,2],[6,10,3],[6,6,2],[6,48,2],[6,2,3],[6,2,2],[6,2,1],[6,4,1],[6,10,1],[6,1,1],[6,7,1],[6,35,1],[6,17,1],[6,21,2],[6,1,1],[6,4,2],[6,25,1],[6,7,2],[6,12,4],[6,2,6],[6,24,4],[6,2,1],[6,5,1],[6,2,1],[6,2,1],[6,3,2],[6,4,2],[6,2,1],[6,2,1],[6,2,9],[6,2,2],[6,5,1],[6,8,10],[6,1,1],[6,12,2],[6,10,1],[6,4,2],[6,12,4],[6,1,3],[6,3,2],[6,8,1],[6,4,4],[6,12,5],[6,4,2],[6,10,1],[6,1,1],[6,12,1],[6,6,4],[6,2,1],[6,3,2],[6,1,1],[6,3,5],[6,6,1],[6,32,1],[6,10,1],[6,6,5],[6,27,2],[6,7,1],[6,2,1],[6,10,2],[6,5,1],[6,8,2],[6,3,2],[6,9,2],[6,22,1],[6,2,2],[6,10,1],[6,3,4],[6,1,1],[6,3,6],[6,8,2],[6,44,1],[6,1,1],[6,9,7],[6,9,5],[6,19,4],[6,7,1],[6,1,1],[6,10,1],[6,14,2],[6,4,3],[6,4,1],[6,6,1],[6,3,1],[6,4,1],[6,6,3],[6,6,2],[6,6,1],[6,1,3],[6,12,13],[6,3,2],[6,1,4],[6,15,1],[6,39,4],[6,5,1],[6,1,5],[6,11,3],[6,5,7],[6,9,2],[6,1,1],[6,12,1],[6,12,1],[6,1,4],[6,11,1],[6,3,1],[6,6,2],[6,5,2],[6,2,1],[6,1,2],[6,2,1],[6,41,23],[6,3,1],[6,15,1],[6,1,1],[6,1,1],[6,2,2],[6,3,1],[6,10,1],[6,17,6],[6,5,2],[6,30,1],[7,2,2],[7,10,2],[7,8,3],[7,9,4],[7,4,1],
[7,8,1],[7,2,1],[7,7,134],[7,16,1],[7,5,3],[7,3,1],[7,6,2],[7,1,1],[7,5,1],[7,5,1],[7,2,1],[7,24,1],[7,8,4],[7,9,2],[7,1,1],[7,6,2],[7,9,2],[7,1,1],[7,5,28],[7,1,1],[7,2,2],[7,7,2],[7,11,1],[7,2,1],[7,17,32],[7,5,1],[7,2,1],[7,3,2],[7,7,4],[7,15,3],[7,3,1],[7,6,2],[7,1,1],[7,2,1],[7,1,1],[7,1,11],[7,2,1],[7,8,1],[7,6,1],[7,2,1],[7,57,1],[7,20,46],[7,6,2],[7,6,1],[7,1,2],[7,28,7],[7,3,5],[7,4,1],[7,4,6],[7,2,2],[7,3,3],[7,2,3],[7,2,1],[7,1,1],[7,2,6],[7,4,1],[7,3,1],[7,23,1],[7,7,2],[7,7,1],[7,4,3],[7,2,1],[7,1,1],[7,4,2],[7,15,2],[7,6,1],[7,2,1],[7,14,1],[7,1,1],[7,1,1],[7,4,2],[7,2,1],[7,4,1],[7,2,1],[7,4,3],[7,22,1],[7,10,1],[7,2,1],[7,1,2],[7,7,2],[7,1,2],[7,12,1],[7,3,1],[7,2,4],[7,3,8],[7,2,1],[7,6,1],[7,5,3],[7,8,2],[7,5,1],[7,6,1],[7,6,1],[7,5,1],[7,9,5],[7,3,1],[7,3,2],[7,3,19],[7,28,3],[7,2,2],[7,3,1],[7,51,4],[7,2,1],[7,2,1],[7,22,2],[7,5,1],[7,2,1],[7,4,2],[7,2,1],[7,6,2],[7,6,1],[7,3,1],[7,37,1],[7,9,1],[7,8,2],[7,2,1],[7,4,1],[7,2,1],[7,18,1],[7,9,2],[7,1,1],[7,5,1],[7,2,1],[7,13,1],[7,45,1],[7,1,3],[7,7,5],[7,16,1],[7,7,1],[7,1,1],[7,3,1],[7,8,1],[7,1,1],[7,1,4],[7,2,2],[7,6,1],[7,6,1],[7,2,1],[7,16,1],[7,11,1],[7,1,1],[7,2,1],[7,3,2],[7,8,8],[7,33,1],[7,2,8],[7,4,1],[7,6,7],[7,12,3],[7,17,1],[7,9,5],[7,3,2],[7,3,2],[7,4,1],[7,1,1],[7,2,2],[7,6,1],[8,9,1],[8,79,3],[8,3,1],[8,14,4],[8,2,4],[8,10,5],[8,7,3],[8,8,1],[8,6,1],[8,7,1],[8,8,2],[8,9,1],[8,30,2],[8,1,1],[8,1,5],[8,15,2],[8,10,3],[8,5,3],[8,1,2],[8,3,1],[8,16,1],[8,3,1],[8,3,3],[8,3,4],[8,2,1],[8,6,2],[8,4,4],[8,5,3],[8,8,4],[8,8,3],[8,4,3],[8,13,7],[8,2,1],[8,2,1],[8,1,1],[8,4,1],[8,10,3],[8,16,9],[8,3,2],[8,1,2],[8,2,5],[8,5,2],[8,156,14],[8,1,1],[8,5,1],[8,252,690],[8,5,1],[8,25,21],[8,1,1],[8,39,12],[8,1,4],[8,6,1],[8,25,7],[8,1,1],[8,7,1],[8,46,11],[8,3,1],[8,1,1],[8,14,1],[8,24,1],[8,16,3],[8,6,3],[8,5,1],[8,1,2],[8,12,2],[8,2,1],[8,2,5],[8,6,1],[8,6,1],[8,14,1],[8,7,1],[8,6,1],[8,4,6],[8,1,2],[8,3,1],[8,2,14],[8,7,12],[8,2,2],[8,25,15],[8,8,3],[8,6,6],[8,5,1],[8,1,1],[8,2,3],[8,18,3],[8,2,2],[8,3,1],[8,4,1],[8,3,3],[8,4,2],[8,12,2],[8,1,1],[8,4,1],[8,18,1],[8,2,2],[8,11,3],[8,5,1],[8,6,1],[8,13,1],[8,6,1],[8,23,1],[8,18,3],[8,13,2],[8,4,1],[8,38,4],[8,1,1],[8,6,1],[8,10,2],[8,2,7],[8,10,7],[8,1,1],[8,4,7],[8,2,1],[8,2,2],[8,7,1],[8,17,1],[8,10,5],[8,4,4],[8,8,4],[8,3,2],[8,2,1],[8,33,1],[8,8,6],[8,15,1],[8,2,1],[8,7,4],[8,6,3],[8,2,1],[8,1,2],[8,3,1],[8,4,1],[8,4,2],[8,27,1],[8,10,1],[9,8,2],[9,2,2],[9,7,1],[9,11,1],[9,35,5],[9,3,1],[9,2,2],[9,6,7],[9,16,2],[9,7,15],[9,3,1],[9,9,1],[9,5,1],[9,3,1],[9,3,1],[9,4,1],[9,2,5],[9,1,1],[9,5,4],[9,1,1],[9,13,1],[9,14,4],[9,3,1],[9,35,3],[9,41,1],[9,8,3],[9,2,5],[9,8,2],[9,13,3],[9,10,1],[9,4,1],[9,35,12],[9,9,1],[9,12,1],[9,4,1],[9,2,4],[9,1,2],[9,6,4],[9,1,4],[9,20,3],[9,4,3],[9,3,3],[9,1,4],[9,2,11],[9,11,2],[9,19,1],[9,5,1],[9,6,2],[9,1,1],[9,3,1],[9,15,3],[9,2,1],[9,6,1],[9,13,1],[9,2,1],[9,11,2],[9,3,5],[9,6,1],[9,16,1],[9,4,1],[9,3,2],[9,3,1],[9,2,5],[9,13,1],[9,3,1],[9,2,2],[9,7,1],[9,2,3],[9,3,4],[9,5,1],[9,4,1],[9,10,2],[9,36,1],[9,7,2],[9,3,1],[9,4,2],[9,5,5],[9,12,1],[9,4,1],[9,2,2],[9,12,1],[9,13,1],[9,12,1],[9,2,4],[9,1,1],[9,1,2],[9,6,6],[9,1,2],[9,8,4],[9,7,2],[9,15,4],[10,3,25],[10,2,1],[10,4,2],[10,8,1],[10,2,1],[10,1,1],[10,21,1],[10,21,19],[10,4,4],[10,4,8],[10,2,1],[10,1,3],[10,3,5],[10,6,1],[10,8,5],[10,4,1],[10,24,5],[10,2,2],[10,24,1],[10,6,4],[10,1,2],[10,25,1],[10,14,1],[10,6,3],[10,2,3],[10,6,1],[10,15,2],[10,54,3],[10,12,1],[10,21,1],[10,7,1],[10,4,4],[10,5,1],[10,10,3],[10,37,1],[10,8,3],[10,11,1],[10,2,4],[10,6,1],[10,30,1],[10,35,1],[10,4,2]
,[10,2,1],[10,5,2],[10,6,1],[10,4,4],[10,12,1],[10,12,1],[10,44,4],[10,16,3],[10,1,64],[10,27,1],[10,9,3],[10,17,2],[10,25,2],[10,2,2],[10,7,3],[10,89,1],[10,7,30],[10,2,4],[10,2,3],[10,2,1],[10,3,3],[10,11,1],[10,7,1],[10,2,1],[10,4,2],[10,1,1],[10,1,1],[10,6,2],[10,7,3],[10,4,1],[10,2,2],[10,18,1],[10,4,1],[10,19,1],[10,14,6],[10,5,1],[10,5,6],[10,12,1],[11,5,6],[11,15,8],[11,9,1],[11,3,2],[11,6,3],[11,24,4],[11,27,3],[11,2,2],[11,5,9],[11,13,1],[11,3,1],[11,2,25],[11,10,1],[11,4,11],[11,7,2],[11,49,1],[11,4,1],[11,12,1],[11,7,1],[11,1,2],[11,10,6],[11,2,1],[11,4,2],[11,1,2],[11,2,1],[11,5,1],[11,4,3],[11,1,1],[11,6,1],[11,4,3],[11,95,2],[11,8,1],[11,18,1],[11,5,1],[11,16,12],[11,13,2],[11,7,6],[11,56,1],[11,6,1],[11,8,1],[11,21,14],[11,2,7],[11,5,1],[11,1,1],[11,5,2],[11,2,1],[11,15,1],[11,3,3],[11,26,1],[11,6,6],[11,1,1],[11,10,7],[11,6,3],[11,6,1],[11,8,2],[11,1,2],[11,35,2],[11,19,2],[11,8,2],[11,4,1],[11,7,2],[11,4,5],[11,3,5],[11,17,1],[11,3,3],[11,2,1],[11,12,1],[11,2,8],[11,85,1],[11,4,1],[11,9,1],[11,2,2],[11,2,1],[11,6,2],[11,6,3],[11,18,3],[11,1,1],[11,8,1],[11,22,1],[11,7,1],[11,4,2],[11,4,1],[11,8,3],[11,10,4],[11,24,1],[11,10,19],[11,12,8],[12,5,1],[12,1,7],[12,4,1],[12,21,6],[12,12,2],[12,16,1],[12,1,1],[12,2,1],[12,3,1],[12,8,9],[12,1,1],[12,17,2],[12,16,6],[12,14,1],[12,3,3],[12,27,3],[12,2,1],[12,3,3],[12,14,4],[12,1,3],[12,10,1],[12,5,7],[12,7,3],[12,13,5],[12,4,1],[12,47,4],[12,18,1],[12,31,2],[12,8,1],[12,5,4],[12,1,1],[12,26,1],[12,13,2],[12,5,2],[12,4,3],[12,15,5],[12,2,1],[12,2,1],[12,3,1],[12,5,1],[12,11,1],[12,4,3],[12,1,1],[12,7,2],[12,6,1],[12,14,6],[12,32,4],[12,14,1],[12,31,1],[12,7,3],[12,9,7],[12,5,1],[12,6,1],[12,6,6],[12,7,8],[12,2,1],[12,3,1],[12,4,3],[12,1,1],[12,19,2],[12,11,1],[12,7,2],[12,8,1],[12,15,4],[12,5,1],[12,9,3],[12,2,1],[12,1,1],[12,8,9],[12,3,6],[12,15,1],[13,1,11],[13,7,2],[13,10,1],[13,13,4],[13,3,2],[13,1,2],[13,2,1],[13,3,4],[13,3,1],[13,4,3],[13,5,1],[13,10,13],[13,5,4],[13,2,3],[13,3,2],[13,72,2],[13,7,3],[13,19,2],[13,4,1],[13,5,6],[13,4,2],[13,2,1],[13,2,1],[13,34,11],[13,5,2],[13,9,5],[13,6,2],[13,5,5],[13,9,5],[13,9,1],[13,19,3],[13,4,1],[13,3,1],[13,7,2],[13,1,1],[13,11,7],[13,4,7],[13,6,1],[13,2,1],[13,1,1],[13,21,1],[13,6,15],[13,5,2],[13,1,1],[13,1,2],[14,2,1],[14,18,1],[14,8,2],[14,5,1],[14,2,2],[14,5,2],[14,2,1],[14,8,2],[14,4,1],[14,8,5],[14,14,1],[14,9,6],[14,18,2],[14,4,1],[14,6,1],[14,18,1],[14,6,6],[14,4,1],[14,6,2],[14,6,8],[14,3,1],[14,2,3],[14,1,1],[14,17,4],[14,4,3],[14,15,3],[14,4,8],[14,15,2],[14,6,1],[14,9,22],[14,7,3],[14,7,6],[14,2,2],[14,1,1],[14,7,4],[14,10,1],[14,1,1]])\n #data = 
np.array([[26,2],[18,3],[30,4],[19,2],[21,1],[40,1],[17,3],[20,3],[19,3],[15,4],[246,1],[57,2],[16,2],[44,101],[31,1],[19,2],[35,2],[25,1],[28,1],[82,1],[52,11],[19,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,6],[1,1],[1,4],[1,1],[1,7],[1,9],[1,1],[1,2],[1,4],[1,2],[1,2],[1,2],[1,1],[1,1],[1,1],[1,9],[1,1],[1,1],[1,1],[1,2],[1,6],[1,1],[1,2],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,13],[1,1],[1,4],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,7],[1,2],[1,1],[1,5],[1,1],[1,1],[1,1],[1,2],[1,4],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,4],[1,3],[1,1],[1,1],[1,2],[1,1],[1,4],[1,3],[1,2],[1,3],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,3],[1,2],[1,1],[1,1],[1,2],[1,3],[1,1],[1,2],[1,1],[1,1],[1,3],[1,37],[1,1],[1,2],[1,1],[1,1],[1,50],[1,1],[1,1],[1,1],[1,8],[1,1],[1,1],[1,1],[1,6],[1,2],[1,3],[1,3],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,2],[1,15],[1,2],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,2],[1,2],[1,2],[1,9],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,12],[2,3],[2,3],[2,1],[2,1],[2,1],[2,4],[2,1],[2,5],[2,1],[2,1],[2,1],[2,1],[2,2],[2,2],[2,1],[2,3],[2,2],[2,1],[2,13],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,1],[2,8],[2,3],[2,1],[2,1],[2,13],[2,2],[2,1],[2,2],[2,3],[2,1],[2,1],[3,1],[3,2],[3,5],[3,1],[3,1],[3,11],[3,3],[3,1],[3,1],[3,6],[3,1],[3,3],[3,1],[3,2],[3,4],[3,2],[3,2],[3,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[4,1],[4,2],[4,2],[4,9],[4,1],[4,1],[4,5],[4,1],[4,16],[4,1],[4,2],[4,1],[4,1],[4,1],[4,6],[4,2],[4,2],[5,2],[5,2],[5,2],[5,2],[5,3],[5,1],[6,3],[6,1],[6,4],[6,1],[7,1],[7,1],[7,2],[7,1],[7,1],[8,7],[8,1],[8,1],[9,1],[9,3],[9,2],[9,1],[10,1],[10,11],[11,1],[11,2],[12,4],[13,11],[13,2],[14,3],[22,1],[39,3],[107,1],[46,6],[22,1],[15,1],[29,45],[29,1],[35,1],[23,2],[21,1],[17,1],[57,1],[20,1],[19,4],[24,1],[18,2],[61,2],[51,12],[41,3],[1,1],[1,1],[1,3],[1,1],[1,1],[1,4],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,6],[1,2],[1,1],[1,4],[1,2],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,4],[1,3],[1,1],[1,1],[1,1],[1,1],[1,3],[1,3],[1,1],[1,1],[1,1],[1,3],[1,3],[1,2],[1,4],[1,7],[1,3],[1,1],[1,15],[1,2],[1,1],[1,2],[1,2],[1,2],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,4],[1,4],[1,2],[1,2],[1,1],[1,4],[1,2],[1,5],[1,1],[1,1],[1,1],[1,1],[1,5],[1,8],[1,1],[1,1],[1,2],[1,2],[1,134],[1,45],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,4],[1,6],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,6],[1,1],[1,19],[1,4],[1,2],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,19],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,5],[1,3],[1,6],[1,2],[1,1],[1,3],[1,2],[1,2],[1,1],[1,2],[1,1],[1,26],[1,4],[1,1],[1,3],[1,1],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,1],[1,5],[1,4],[1,1],[1,27],[1,1],[1,1],[1,1],[1,11],[1,2],[1,4],[1,1],[1,1],[1,24],[1,2],[1,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,15],[2,1],[2,1],[2,1],[2,3],[2,1],[2,5],[2,1],[2,4],[2,1],[2,1],[2,5],[2,2],[2,1],[2,1],[2,2],[2,1],[2,3],[2,4],[2,1],[2,3],[2,1],[2,2],[2,17],[2,4],[2,2],[2,7],[2,2],[2,1],[3,1],[3,3],[3,1],[3,1],[3,1],[3,1],[3,2],[3,1],[3,1],[3,3],[3,1],[3,18],[3,1],[3,1],[3,1],[3,6],[3,8],[3,1],[3,1],[3,2],[3,2],[3,1],[4,1],[4,3],[4,1],[4,1],[4,1],[4,4],[4,1],[4,20],[4,2],[4,4],[4,2],[4,1],[4,3],[4,1],[4,1],[4,1],[4,1],[4,3],[4,4],[4,2],[4,2],[4,1],[4,1],[5,3],[5,1],[5,1],[6,1],[6,8],[7,1],[7,1],[7,5],[8,21],[8,1],[8,1],[8,2],[9,1],[10,30],[10,2],[10,3],[10,1],[11,1],
[11,2],[11,1],[11,1],[12,1],[12,3],[12,6],[13,1],[13,2],[13,1],[14,1],[14,2],[17,1],[52,1],[64,1],[190,2],[25,3],[19,3],[22,1],[15,2],[25,1],[25,2],[38,1],[69,1],[1,1],[1,4],[1,1],[1,21],[1,1],[1,3],[1,11],[1,31],[1,1],[1,4],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,6],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,5],[1,2],[1,2],[1,212],[1,6],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,3],[1,1],[1,3],[1,4],[1,1],[1,2],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,1],[1,1],[1,3],[1,3],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,6],[1,1],[1,3],[1,7],[1,2],[1,5],[1,3],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,9],[1,1],[1,2],[1,2],[1,3],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,78],[1,3],[1,7],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,8],[1,3],[1,2],[1,1],[1,4],[1,2],[1,1],[1,1],[1,1],[1,1],[2,1],[2,4],[2,1],[2,1],[2,1],[2,3],[2,2],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,8],[2,1],[2,1],[2,5],[2,2],[2,1],[2,6],[2,1],[2,4],[2,2],[2,2],[2,1],[2,2],[2,1],[2,1],[2,30],[2,3],[2,5],[2,4],[2,3],[2,1],[2,1],[3,1],[3,2],[3,1],[3,11],[3,1],[3,1],[3,8],[3,2],[3,1],[3,4],[3,3],[3,2],[3,3],[3,1],[3,3],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[4,8],[4,1],[4,2],[4,1],[4,2],[4,1],[4,3],[4,1],[4,2],[4,7],[4,1],[4,1],[4,1],[4,1],[4,7],[5,1],[5,1],[5,2],[5,2],[5,1],[5,11],[5,1],[5,1],[5,1],[5,1],[5,2],[5,1],[5,2],[5,8],[5,1],[6,2],[6,8],[6,1],[6,1],[6,1],[6,2],[6,1],[6,2],[6,1],[7,1],[7,3],[7,1],[7,2],[7,6],[7,2],[8,1],[8,6],[8,15],[9,2],[10,3],[10,1],[10,1],[10,2],[10,5],[10,2],[10,64],[11,1],[11,1],[11,1],[12,1],[12,6],[12,1],[12,2],[14,4],[14,1],[17,1],[21,1],[17,1],[32,1],[16,1],[18,5],[17,1],[16,1],[17,2],[262,1],[22,1],[227,5],[82,4],[28,3],[56,7],[42,2],[26,1],[137,1],[55,19],[29,1],[42,2],[1,5],[1,1],[1,2],[1,22],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,4],[1,2],[1,3],[1,1],[1,4],[1,1],[1,2],[1,4],[1,1],[1,2],[1,2],[1,1],[1,2],[1,2],[1,5],[1,7],[1,2],[1,2],[1,1],[1,1],[1,7],[1,1],[1,1],[1,1],[1,2],[1,3],[1,16],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,5],[1,1],[1,1],[1,6],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,4],[1,28],[1,6],[1,1],[1,2],[1,2],[1,2],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,16],[1,1],[1,2],[1,3],[1,1],[1,1],[1,3],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,7],[1,1],[1,1],[1,2],[1,2],[1,4],[1,3],[1,4],[1,1],[1,1],[1,2],[1,5],[1,1],[1,1],[1,5],[1,2],[1,2],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[2,5],[2,5],[2,4],[2,2],[2,32],[2,1],[2,1],[2,4],[2,3],[2,1],[2,1],[2,1],[2,45],[2,3],[2,11],[2,1],[2,1],[2,2],[2,1],[2,4],[2,2],[2,1],[2,2],[2,2],[2,2],[2,1],[2,2],[2,3],[2,1],[2,8],[2,2],[2,2],[2,1],[2,2],[2,2],[2,1],[2,7],[2,4],[2,2],[2,4],[2,1],[2,8],[3,1],[3,1],[3,1],[3,3],[3,4],[3,1],[3,10],[3,6],[3,1],[3,1],[3,1],[3,2],[3,4],[3,4],[3,1],[3,1],[3,7],[3,2],[3,5],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,2],[3,3],[3,1],[3,1],[3,19],[4,1],[4,1],[4,1],[4,1],[4,1],[4,3],[4,1],[4,1],[4,2],[4,1],[4,9],[4,4],[4,5],[4,3],[4,2],[4,3],[5,1],[5,2],[5,20],[5,1],[5,2],[5,2],[5,1],[5,1],[5,1],[5,1],[5,1],[5,1],[5,4],[5,1],[6,2],[6,2],[6,1],[6,1],[6,1],[6,1],[6,1],[6,6],[6,2],[7,1],[7,1],[7,1],[7,4],[8,1],[8,5],[8,14],[9,1],[9,4],[10,1],[10,1],[10,1],[10,1],[11,6],[11,4],
[12,1],[12,2],[13,2],[13,1],[13,6],[14,2],[42,4],[264,3],[22,3],[15,6],[19,1],[46,2],[193,1],[15,1],[127,5],[47,1],[16,2],[27,1],[25,1],[19,5],[73,1],[60,1],[27,1],[19,2],[1,2],[1,1],[1,2],[1,2],[1,4],[1,2],[1,1],[1,1],[1,2],[1,1],[1,2],[1,16],[1,2],[1,3],[1,2],[1,1],[1,4],[1,20],[1,3],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,2],[1,2],[1,2],[1,3],[1,4],[1,1],[1,1],[1,2],[1,6],[1,1],[1,1],[1,1],[1,47],[1,2],[1,2],[1,5],[1,2],[1,2],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,16],[1,1],[1,1],[1,6],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,1],[1,2],[1,5],[1,2],[1,7],[1,1],[1,1],[1,4],[1,3],[1,1],[1,1],[1,2],[1,14],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,3],[1,4],[1,5],[1,1],[1,1],[1,1],[1,17],[1,71],[1,1],[1,1],[1,1],[1,79],[1,1],[1,2],[1,4],[1,2],[1,1],[1,1],[1,3],[1,4],[1,1],[1,1],[1,7],[1,1],[1,3],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,4],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[2,1],[2,1],[2,1],[2,4],[2,13],[2,1],[2,2],[2,2],[2,1],[2,1],[2,1],[2,2],[2,3],[2,6],[2,3],[2,1],[2,1],[2,1],[2,2],[2,17],[2,2],[2,2],[2,8],[2,1],[2,3],[2,2],[2,11],[2,1],[2,2],[2,5],[2,1],[2,1],[2,2],[2,1],[2,2],[2,2],[2,1],[2,1],[2,3],[2,4],[2,1],[2,6],[2,25],[2,1],[2,1],[2,1],[2,1],[2,2],[2,3],[2,2],[2,2],[2,1],[2,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,3],[3,8],[3,5],[3,3],[3,7],[3,1],[3,1],[3,9],[3,6],[3,3],[3,2],[3,8],[3,4],[3,3],[4,1],[4,1],[4,1],[4,1],[4,1],[4,6],[4,1],[4,3],[4,2],[4,1],[4,3],[4,1],[4,2],[4,1],[4,1],[4,1],[4,1],[5,1],[5,5],[5,3],[5,2],[5,3],[5,1],[5,3],[6,1],[6,1],[6,1],[6,1],[7,1],[7,1],[7,1],[7,1],[7,32],[7,2],[7,1],[7,4],[7,1],[7,1],[7,4],[8,2],[8,2],[8,1],[8,2],[8,1],[9,1],[9,3],[9,1],[9,1],[9,1],[10,3],[11,4],[11,1],[11,1],[11,3],[11,3],[11,1],[12,1],[12,1],[12,1],[13,2],[13,1],[13,2],[14,5],[26,2],[49,1],[26,1],[18,1],[27,1],[15,1],[23,1],[58,3],[36,2],[19,3],[62,2],[72,2],[90,1],[124,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,2],[1,3],[1,1],[1,4],[1,2],[1,1],[1,1],[1,18],[1,1],[1,2],[1,4],[1,24],[1,1],[1,2],[1,1],[1,1],[1,4],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,4],[1,3],[1,1],[1,3],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,5],[1,2],[1,1],[1,1],[1,1],[1,1],[1,8],[1,10],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,17],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,3],[1,2],[1,1],[1,4],[1,2],[1,1],[1,2],[1,25],[1,2],[1,7],[1,1],[1,1],[1,6],[1,1],[1,3],[1,2],[1,4],[1,1],[1,1],[1,6],[1,1],[1,2],[1,3],[1,1],[1,4],[1,2],[1,3],[1,2],[1,3],[1,1],[1,1],[1,3],[1,2],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,2],[1,1],[1,1],[2,1],[2,5],[2,1],[2,2],[2,5],[2,1],[2,1],[2,1],[2,2],[2,3],[2,2],[2,2],[2,1],[2,2],[2,6],[2,1],[2,2],[2,1],[2,3],[2,1],[2,2],[2,3],[2,13],[2,1],[2,2],[2,1],[2,3],[2,1],[2,4],[2,1],[2,2],[2,1],[2,1],[2,1],[2,3],[2,2],[2,1],[2,2],[2,3],[2,2],[2,2],[2,1],[2,1],[2,3],[2,1],[2,1],[2,5],[3,2],[3,2],[3,2],[3,5],[3,1],[3,1],[3,1],[3,1],[3,3],[3,2],[3,2],[3,1],[3,1],[3,1],[3,1],[3,5],[3,1],[3,4],[3,2],[3,1],[3,1],[3,3],[3,1],[3,1],[3,3],[4,3],[4,1],[4,2],[4,1],[4,1],[4,1],[4,1],[4,1],[5,1],[5,2],[5,9],[5,2],[5,1],[5,7],[5,2],[5,1],[5,2],[5,2],[5,1],[6,3],[6,1],[6,1],[6,1],[6,1],[6,1],[6,1],[6,29],[6,2],[7,3],[7,2],[7,1],[7,1],[7,2],[7,2],[7,2],[7,3]
,[7,2],[8,5],[8,1],[8,1],[8,3],[8,2],[8,1],[8,2],[9,1],[9,1],[10,1],[10,14],[10,3],[10,4],[10,3],[10,4],[11,1],[11,5],[11,2],[11,3],[11,1],[11,1],[11,2],[12,1],[12,1],[13,5],[13,1],[13,1],[14,1],[14,3],[14,1],[24,1],[15,1],[19,2],[15,5],[131,1],[28,13],[33,1],[24,1],[17,1],[15,1],[44,2],[16,2],[16,3],[29,7],[29,1],[82,8],[16,1],[17,2],[16,2],[45,1],[159,1],[100,2],[23,1],[15,1],[15,1],[22,1],[48,1],[25,5],[15,1],[1,1],[1,3],[1,1],[1,3],[1,1],[1,1],[1,2],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,3],[1,2],[1,2],[1,6],[1,1],[1,2],[1,1],[1,2],[1,4],[1,44],[1,1],[1,2],[1,40],[1,1],[1,9],[1,1],[1,17],[1,1],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,25],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,12],[1,1],[1,2],[1,12],[1,2],[1,2],[1,5],[1,2],[1,3],[1,7],[1,5],[1,72],[1,2],[1,8],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,3],[1,1],[1,2],[1,2],[1,5],[1,3],[1,2],[1,3],[1,382],[1,1],[1,3],[1,1],[1,1],[1,6],[1,4],[1,1],[1,2],[1,2],[1,1],[1,1],[1,2],[1,4],[1,1],[1,2],[1,6],[1,1],[1,3],[1,3],[1,1],[1,6],[1,3],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,7],[1,1],[1,1],[1,2],[2,1],[2,1],[2,1],[2,1],[2,12],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,1],[2,52],[2,2],[2,1],[2,1],[2,2],[2,1],[2,2],[2,9],[2,1],[2,1],[2,18],[2,3],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,2],[2,3],[2,2],[2,2],[2,2],[2,1],[2,1],[2,1],[2,2],[2,3],[2,2],[2,1],[2,1],[2,1],[2,1],[3,6],[3,3],[3,4],[3,1],[3,1],[3,1],[3,1],[3,1],[3,4],[3,1],[3,3],[3,1],[3,1],[3,2],[3,1],[3,1],[3,80],[3,1],[3,2],[3,1],[3,1],[4,2],[4,1],[4,1],[4,1],[4,1],[4,1],[4,3],[4,1],[4,2],[4,1],[4,4],[4,4],[4,1],[4,2],[4,2],[4,1],[4,2],[4,1],[4,1],[5,1],[5,1],[5,3],[5,3],[5,1],[5,1],[5,1],[5,2],[5,1],[6,4],[6,3],[6,1],[6,6],[6,1],[6,1],[7,2],[7,1],[7,1],[7,2],[7,1],[7,2],[7,1],[7,1],[8,1],[8,4],[8,1],[8,2],[8,3],[9,2],[9,3],[9,3],[9,6],[10,1],[10,1],[10,1],[10,1],[11,8],[11,1],[11,1],[12,2],[13,5],[15,1],[35,7],[16,1],[24,2],[16,1],[25,1],[65,4],[36,1],[16,5],[21,10],[18,1],[16,12],[29,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,5],[1,3],[1,3],[1,3],[1,1],[1,4],[1,3],[1,3],[1,3],[1,1],[1,1],[1,1],[1,2],[1,5],[1,3],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,8],[1,1],[1,1],[1,1],[1,1],[1,1],[1,8],[1,2],[1,4],[1,2],[1,7],[1,1],[1,1],[1,1],[1,2],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,5],[1,1],[1,4],[1,8],[1,6],[1,1],[1,4],[1,1],[1,1],[1,3],[1,1],[1,3],[1,2],[1,7],[1,2],[1,5],[1,2],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,3],[1,3],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,5],[1,1],[1,13],[1,3],[1,2],[1,1],[1,1],[1,10],[1,1],[1,2],[1,1],[1,3],[1,12],[1,2],[1,2],[1,4],[1,1],[1,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,4],[2,3],[2,1],[2,1],[2,1],[2,6],[2,1],[2,6],[2,1],[2,2],[2,6],[2,1],[2,10],[2,1],[2,1],[2,4],[2,1],[2,3],[2,3],[2,1],[2,1],[2,3],[2,5],[2,3],[2,10],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,3],[2,1],[2,4],[2,1],[2,1],[2,2],[2,1],[2,3],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[3,2],[3,1],[3,1],[3,1],[3,5],[3,34],[3,2],[3,3],[3,1],[3,1],[3,2],[3,1],[3,5],[3,1],[3,1],[3,2],[3,4],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,2],[3,1],[3,2],[3,25],[3,1],[3,1],[4,1],[4,6],[4,3],[4,1],[4,6],[4,1],[4,1],[4,4],[4,1],[4,1],[4,1],[4,1],[4,1],[4,2],[4,1],[4,1],[4,3],[4,4],[5,1],[5,2],[5,3],[5,1],[5,1],[5,1],[5,4],[5,1],[5,2],[5,4],[5,1],[5,1],[6,1],[6,4],[6,2],[6,1],[6,1],[6,2],[6,3],[7,11],[7,1],[7,5],[8,2],[8,1],[8,1],[9,2],[9,5
],[9,4],[9,3],[9,1],[9,2],[9,2],[10,1],[10,2],[11,1],[12,3],[12,1],[13,11],[13,1],[17,1],[201,2],[16,2],[104,4],[123,2],[15,1],[26,5],[74,1],[15,3],[15,7],[16,1],[39,2],[27,1],[32,1],[53,4],[28,1],[25,3],[1,1],[1,3],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,7],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,1],[1,2],[1,16],[1,3],[1,2],[1,2],[1,3],[1,1],[1,1],[1,3],[1,11],[1,4],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,4],[1,1],[1,4],[1,1],[1,2],[1,1],[1,1],[1,1],[1,32],[1,2],[1,1],[1,1],[1,6],[1,1],[1,7],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,2],[1,2],[1,1],[1,1],[1,2],[1,2],[1,2],[1,2],[1,1],[1,1],[1,55],[1,2],[1,4],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,5],[1,4],[1,7],[1,1],[1,1],[1,6],[1,2],[1,2],[1,6],[1,3],[1,2],[1,1],[1,6],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,4],[1,9],[1,2],[1,3],[1,1],[2,1],[2,1],[2,11],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,2],[2,1],[2,4],[2,1],[2,2],[2,2],[2,2],[2,3],[2,4],[2,2],[2,5],[2,1],[2,1],[2,3],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,3],[2,3],[2,2],[2,3],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,4],[2,2],[3,2],[3,1],[3,1],[3,3],[3,1],[3,1],[3,1],[3,2],[3,1],[3,2],[3,6],[3,2],[3,1],[3,1],[3,3],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,5],[3,1],[3,1],[3,2],[3,2],[3,2],[3,1],[3,1],[3,2],[3,1],[4,1],[4,1],[4,1],[4,1],[4,1],[4,1],[4,3],[4,1],[4,2],[4,3],[4,3],[4,1],[4,4],[4,1],[4,2],[4,1],[4,3],[4,1],[5,1],[5,2],[5,1],[5,3],[5,3],[5,1],[5,2],[5,9],[5,1],[5,1],[5,2],[5,1],[5,2],[6,2],[6,3],[6,1],[6,1],[6,2],[6,1],[6,2],[6,2],[6,1],[6,4],[6,2],[7,7],[7,2],[7,4],[7,1],[7,2],[7,19],[7,1],[7,1],[7,1],[8,1],[8,12],[8,1],[8,3],[8,1],[9,1],[9,1],[9,1],[9,1],[9,1],[10,1],[10,1],[10,4],[10,2],[12,3],[12,1],[12,1],[13,1],[13,1],[14,1],[14,1],[14,3],[30,7],[32,1],[40,2],[16,1],[91,6],[122,1],[15,1],[17,1],[20,3],[19,2],[19,1],[98,2],[81,14],[47,4],[38,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,6],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,83],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,5],[1,2],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,1],[1,4],[1,2],[1,2],[1,2],[1,1],[1,1],[1,2],[1,1],[1,7],[1,1],[1,2],[1,4],[1,1],[1,1],[1,88],[1,2],[1,2],[1,2],[1,2],[1,3],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,57],[1,2],[1,6],[1,4],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,5],[1,5],[1,1],[1,1],[1,9],[1,1],[1,1],[1,3],[1,4],[1,1],[1,2],[1,5],[1,2],[1,3],[1,1],[1,2],[1,4],[1,4],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,2],[1,3],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,6],[1,3],[1,2],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[2,1],[2,1],[2,2],[2,2],[2,2],[2,2],[2,2],[2,15],[2,4],[2,1],[2,1],[2,2],[2,1],[2,2],[2,3],[2,3],[2,3],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,1],[2,2],[2,1],[2,2],[2,1],[2,7],[2,1],[2,4],[2,3],[2,2],[2,3],[2,1],[2,1],[2,2],[3,4],[3,1],[3,1],[3,2],[3,3],[3,6],[3,2],[3,9],[3,9],[3,2],[3,2],[3,1],[3,15],[3,1],[3,1],[3,1],[3,3],[4,1],[4,1],[4,2],[4,3],[4,1],[4,2],[4,1],[4,6],[4,2],[4,8],[4,9],[4,1],[4,1],[4,1],[5,1],[5,1],[5,78],[5,1],[5,1],[5,1],[5,17],[5,1],[5,3],[5,2],[5,1],[6,1],[6,1],[6,5],[6,19],[6,1],[6,6],[6,1],[6,1],[6,2],[6,1],[6,1],[6,1],[6,2],[6,1],[7,2],[7,1],[7,1],[7,4],[7,1],[7,28],[7,1],[8,1],[8,1],[8,1],[9,3],[9,1],[9,11],[9,4],[10,1],[10,2],[11,1],[11,1],[11,1],[11,1],[12,1],[14,2],[14
,2],[14,2],[18,2],[31,1],[29,2],[16,1],[17,20],[25,1],[20,3],[59,1],[25,1],[27,2],[26,1],[44,1],[17,4],[16,4],[20,6],[67,2],[15,1],[65,1],[17,1],[33,1],[61,2],[1,2],[1,2],[1,2],[1,4],[1,1],[1,1],[1,1],[1,2],[1,2],[1,4],[1,4],[1,5],[1,2],[1,1],[1,1],[1,18],[1,1],[1,3],[1,1],[1,2],[1,1],[1,2],[1,2],[1,5],[1,4],[1,1],[1,4],[1,1],[1,1],[1,1],[1,56],[1,1],[1,4],[1,1],[1,9],[1,6],[1,9],[1,1],[1,2],[1,1],[1,1],[1,1],[1,18],[1,10],[1,1],[1,5],[1,1],[1,1],[1,2],[1,5],[1,1],[1,3],[1,1],[1,1],[1,4],[1,1],[1,2],[1,1],[1,8],[1,3],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,3],[1,2],[1,1],[1,1],[1,5],[1,2],[1,1],[1,1],[1,4],[1,2],[1,1],[1,1],[1,5],[1,2],[1,27],[1,3],[1,1],[1,2],[1,9],[1,2],[1,2],[1,6],[1,1],[1,2],[1,1],[1,15],[1,1],[1,2],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,2],[1,3],[1,1],[1,1],[1,2],[1,1],[1,17],[1,1],[1,4],[1,1],[1,1],[1,2],[1,2],[1,4],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,18],[1,1],[1,2],[1,46],[1,1],[1,1],[1,1],[1,6],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,7],[1,8],[1,1],[1,3],[1,6],[2,1],[2,1],[2,1],[2,1],[2,5],[2,4],[2,1],[2,2],[2,2],[2,4],[2,2],[2,1],[2,2],[2,1],[2,3],[2,5],[2,1],[2,2],[2,2],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,12],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,2],[2,3],[2,1],[2,2],[2,1],[2,10],[2,2],[2,8],[2,2],[2,2],[2,1],[2,5],[2,5],[2,4],[2,1],[2,1],[2,1],[2,1],[3,2],[3,6],[3,2],[3,1],[3,58],[3,1],[3,3],[3,1],[3,1],[3,2],[3,1],[3,1],[3,2],[3,1],[3,1],[3,6],[3,10],[3,1],[3,4],[3,1],[3,1],[3,6],[3,1],[3,29],[3,2],[3,2],[3,6],[3,1],[4,1],[4,4],[4,2],[4,1],[4,46],[4,2],[4,1],[4,2],[4,2],[4,3],[4,11],[4,3],[4,1],[4,2],[4,1],[4,15],[4,2],[5,5],[5,9],[5,1],[5,2],[5,136],[5,48],[5,5],[5,1],[5,1],[5,1],[5,1],[5,1],[6,1],[6,1],[6,10],[6,1],[6,2],[6,1],[7,2],[7,1],[7,3],[7,2],[7,11],[7,6],[7,1],[8,1],[8,3],[8,2],[8,1],[8,12],[8,2],[8,2],[9,1],[9,1],[9,1],[9,4],[10,1],[10,2],[11,2],[12,9],[13,1],[14,2],[21,1],[26,1],[16,2],[29,1],[16,5],[401,3],[33,1],[19,31],[15,4],[28,2],[23,1],[42,4],[40,1],[70,1],[15,3],[15,2],[22,1],[103,1],[256,27],[41,1],[86,1],[17,1],[31,1],[26,1],[105,2],[28,1],[1,4],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,6],[1,6],[1,4],[1,1],[1,4],[1,7],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,3],[1,2],[1,1],[1,2],[1,2],[1,8],[1,1],[1,2],[1,1],[1,5],[1,2],[1,1],[1,1],[1,2],[1,2],[1,2],[1,2],[1,1],[1,9],[1,1],[1,2],[1,2],[1,3],[1,2],[1,1],[1,2],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,5],[1,1],[1,29],[1,1],[1,4],[1,2],[1,3],[1,3],[1,17],[1,6],[1,2],[1,1],[1,2],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,9],[1,3],[1,1],[1,1],[1,1],[1,2],[1,3],[1,3],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,8],[1,1],[1,7],[1,1],[1,5],[1,1],[1,1],[1,4],[1,1],[1,2],[1,6],[1,2],[1,1],[1,2],[1,1],[1,4],[1,1],[1,1],[1,3],[1,3],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,16],[1,5],[2,2],[2,1],[2,2],[2,2],[2,2],[2,1],[2,1],[2,8],[2,3],[2,1],[2,2],[2,4],[2,2],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,9],[2,1],[2,23],[2,1],[2,1],[2,1],[2,2],[2,3],[2,1],[2,1],[2,3],[2,1],[2,1],[2,2],[2,1],[2,25],[2,2],[2,3],[2,2],[2,1],[2,1],[2,3],[2,1],[2,3],[2,1],[2,3],[2,1],[2,2],[2,1],[2,1],[2,1],[3,1],[3,2],[3,2],[3,3],[3,2],[3,1],[3,1],[3,5],[3,9],[3,1],[3,3],[3,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,9],[3,1],[3,2],[3,7],[3,3],[3,4],[3,2],[3,1],[3,37],[3,1],[3,1],[3,1],[3,1],[4,1],[4,2],[4,305],[4,4],[4,1],[4,1],[4,1],[4,4],[4,3],[4,1],[4,6],[4,7],[4,1],[4,1],[4,1],[4,1],[4,29],[4,1],[5,10],[
5,1],[5,1],[5,1],[5,1],[5,1],[5,1],[5,1],[5,1],[6,2],[6,1],[6,1],[6,2],[7,1],[7,1],[7,2],[7,1],[7,1],[7,1],[7,2],[8,1],[8,3],[8,2],[9,1],[9,1],[10,1],[10,3],[10,1],[11,6],[11,2],[11,1],[11,1],[12,5],[12,4],[12,1],[14,1],[14,1],[23,1],[26,2],[15,2],[16,16],[31,7],[18,3],[22,3],[87,1],[17,2],[17,9],[30,1],[58,4],[24,2],[28,5],[53,1],[23,1],[28,2],[44,1],[60,3],[17,2],[17,1],[1,1],[1,2],[1,1],[1,11],[1,1],[1,1],[1,2],[1,2],[1,3],[1,2],[1,6],[1,3],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,2],[1,1],[1,1],[1,3],[1,2],[1,4],[1,1],[1,1],[1,2],[1,3],[1,1],[1,1],[1,7],[1,2],[1,1],[1,1],[1,4],[1,2],[1,1],[1,3],[1,1],[1,5],[1,3],[1,3],[1,3],[1,1],[1,1],[1,4],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,3],[1,5],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,8],[1,15],[1,1],[1,8],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,3],[1,15],[1,1],[1,2],[1,1],[1,1],[1,4],[1,1],[1,5],[1,3],[1,1],[1,1],[1,14],[1,1],[1,2],[1,2],[1,3],[1,2],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,3],[1,1],[1,5],[1,2],[1,3],[1,1],[1,2],[1,9],[1,1],[1,4],[1,1],[1,2],[1,8],[1,1],[1,3],[1,1],[1,1],[1,4],[1,4],[1,3],[1,1],[1,1],[1,9],[1,2],[1,4],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,4],[1,2],[1,1],[1,1],[1,2],[1,3],[1,2],[1,6],[1,1],[1,18],[2,1],[2,3],[2,3],[2,1],[2,6],[2,1],[2,2],[2,2],[2,5],[2,1],[2,1],[2,1],[2,3],[2,2],[2,6],[2,1],[2,3],[2,3],[2,1],[2,3],[2,2],[2,2],[2,1],[2,1],[2,9],[2,5],[2,1],[2,1],[2,1],[2,2],[2,85],[2,60],[2,2],[2,1],[2,12],[2,1],[2,1],[2,1],[2,8],[2,1],[2,21],[2,1],[2,3],[2,1],[2,1],[2,8],[2,1],[2,1],[3,3],[3,3],[3,1],[3,3],[3,3],[3,1],[3,2],[3,2],[3,1],[3,1],[3,14],[3,1],[3,6],[3,1],[3,2],[3,1],[3,3],[3,2],[3,1],[3,1],[3,1],[3,1],[3,2],[3,3],[3,2],[4,3],[4,2],[4,1],[4,3],[4,1],[4,1],[4,2],[4,2],[4,1],[4,1],[4,1],[4,1],[4,1],[4,4],[5,1],[5,1],[5,1],[5,3],[5,2],[5,1],[5,4],[6,6],[6,1],[6,18],[6,1],[6,1],[6,1],[6,5],[6,2],[6,3],[6,2],[7,3],[7,5],[7,2],[7,1],[7,3],[7,5],[7,1],[7,1],[7,1],[7,1],[8,1],[8,1],[8,3],[8,1],[8,1],[8,4],[9,1],[9,2],[9,4],[10,2],[10,1],[11,2],[11,1],[11,1],[12,3],[13,1],[14,2],[32,7],[26,2],[22,2],[15,1],[26,46],[15,2],[16,1],[19,1],[36,1],[16,2],[24,1],[20,5],[1,1],[1,1],[1,1],[1,7],[1,1],[1,1],[1,2],[1,4],[1,2],[1,1],[1,1],[1,1],[1,10],[1,5],[1,13],[1,2],[1,3],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,4],[1,3],[1,1],[1,1],[1,2],[1,8],[1,1],[1,3],[1,5],[1,1],[1,2],[1,2],[1,2],[1,4],[1,2],[1,3],[1,1],[1,1],[1,1],[1,2],[1,8],[1,2],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,2],[1,4],[1,3],[1,2],[1,9],[1,19],[1,1],[1,1],[1,1],[1,1],[1,14],[1,3],[1,2],[1,4],[1,2],[1,1],[1,4],[1,1],[1,1],[1,5],[1,2],[1,1],[1,1],[1,2],[1,4],[1,2],[1,1],[1,11],[1,1],[1,3],[1,2],[1,2],[1,1],[1,1],[1,3],[1,9],[1,2],[1,6],[1,9],[1,3],[1,1],[1,1],[1,5],[1,1],[1,3],[1,2],[1,9],[1,1],[1,3],[1,5],[1,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,4],[1,2],[1,1],[1,3],[1,2],[1,1],[1,12],[1,1],[1,1],[1,1],[1,1],[2,5],[2,2],[2,5],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,3],[2,3],[2,114],[2,1],[2,2],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,9],[2,1],[2,1],[2,2],[2,1],[2,3],[2,1],[2,1],[2,2],[2,1],[2,3],[2,19],[2,1],[2,8],[2,2],[2,2],[2,7],[2,1],[2,1],[3,2],[3,1],[3,5],[3,3],[3,1],[3,5],[3,1],[3,1],[3,1],[3,1],[3,1],[3,30],[3,1],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,2],[3,2],[3,1],[3,2],[3,2],[3,1],[3,2],[3,1],[3,2],[4,1],[4,3],[4,1],[4,1],[4,7],[4,2],[4,2],[4,3],[4,3],[4,2],[4,2],[4,1],[4,1],
[4,2],[4,1],[4,2],[4,1],[4,1],[4,6],[5,2],[5,1],[5,2],[5,1],[5,7],[5,7],[5,1],[5,2],[5,1],[6,1],[6,1],[6,1],[6,2],[6,1],[6,1],[6,4],[6,1],[7,1],[7,1],[7,1],[7,3],[7,1],[7,1],[7,1],[8,1],[8,2],[8,3],[8,1],[8,1],[8,9],[8,6],[9,1],[9,3],[9,4],[10,4],[10,1],[10,3],[10,1],[10,19],[11,3],[11,2],[11,5],[11,5],[11,1],[12,7],[13,3],[13,4],[13,2],[13,4],[14,2],[16,1],[93,1],[22,2],[42,6],[15,1],[16,3],[36,8],[34,1],[30,3],[43,7],[46,8],[40,1],[22,1],[1,3],[1,1],[1,13],[1,2],[1,3],[1,2],[1,3],[1,1],[1,2],[1,2],[1,1],[1,2],[1,3],[1,1],[1,2],[1,1],[1,2],[1,1],[1,3],[1,2],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,2],[1,1],[1,5],[1,13],[1,3],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,6],[1,4],[1,1],[1,4],[1,1],[1,2],[1,3],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,6],[1,1],[1,1],[1,1],[1,1],[1,3],[1,2],[1,3],[1,2],[1,3],[1,1],[1,1],[1,3],[1,2],[1,3],[1,3],[1,2],[1,1],[1,3],[1,4],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,3],[1,4],[1,2],[1,2],[1,3],[1,7],[1,3],[1,1],[1,1],[1,3],[1,2],[1,1],[1,4],[1,5],[1,2],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,7],[1,6],[1,1],[1,2],[1,3],[1,3],[1,1],[1,4],[1,2],[1,7],[1,2],[1,5],[1,1],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,3],[1,6],[1,2],[1,2],[1,1],[1,1],[2,1],[2,1],[2,3],[2,1],[2,2],[2,1],[2,3],[2,1],[2,2],[2,12],[2,1],[2,1],[2,3],[2,3],[2,1],[2,2],[2,3],[2,3],[2,1],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,8],[2,2],[2,1],[2,2],[2,1],[2,1],[2,7],[2,1],[2,1],[2,1],[2,7],[2,2],[2,1],[2,18],[2,1],[2,1],[2,1],[2,2],[2,2],[2,1],[2,1],[2,5],[2,1],[2,1],[2,6],[2,3],[2,1],[3,3],[3,1],[3,1],[3,3],[3,1],[3,1],[3,3],[3,1],[3,2],[3,3],[3,1],[3,1],[3,1],[4,6],[4,1],[4,1],[4,3],[4,1],[4,1],[4,1],[4,2],[4,2],[4,5],[4,2],[4,2],[4,2],[4,2],[4,1],[4,3],[4,2],[4,1],[5,1],[5,3],[5,2],[5,2],[5,1],[5,1],[5,3],[5,1],[5,1],[5,2],[5,4],[5,4],[5,1],[6,2],[6,2],[6,2],[6,1],[6,1],[6,1],[6,1],[6,4],[6,1],[7,2],[7,1],[7,2],[7,1],[7,1],[7,1],[8,2],[8,2],[8,3],[8,14],[9,5],[9,2],[9,1],[9,1],[10,8],[10,2],[11,1],[11,1],[12,1],[12,1],[12,1],[12,7],[12,3],[48,1],[73,3],[22,2],[19,1],[20,1],[40,2],[15,2],[34,1],[22,5],[31,2],[47,28],[51,1],[19,2],[231,1],[15,3],[18,2],[18,3],[101,5],[65,2],[30,11],[18,3],[1,1],[1,2],[1,2],[1,1],[1,3],[1,5],[1,2],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,64],[1,2],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,3],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,4],[1,2],[1,1],[1,4],[1,5],[1,1],[1,1],[1,1],[1,1],[1,3],[1,4],[1,3],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,6],[1,1],[1,3],[1,4],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,2],[1,3],[1,2],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,2],[1,3],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,4],[1,3],[1,1],[1,1],[1,1],[1,1],[1,14],[1,1],[1,1],[1,1],[1,1],[1,2],[1,12],[1,2],[1,2],[1,1],[1,1],[1,3],[1,2],[1,3],[1,2],[1,1],[1,5],[1,1],[1,7],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,2],[1,3],[1,1],[2,2],[2,1],[2,3],[2,2],[2,1],[2,1],[2,2],[2,1],[2,2],[2,2],[2,1],[2,1],[2,10],[2,2],[2,1],[2,2],[2,3],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,3],[2,1],[2,6],[2,2],[2,4],[2,9],[2,2],[2,1],[2,3],[2,2],[2,10],[2,3],[2,1],[2,37],[2,2],[2,2],[2,2],[3,9],[3,4],[3,3],[3,2],[3,2],[3,1],[3,19],[3,1],[3,1],[3,1],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,2],[3,2],[3,10],[3,1],[3,1
[... large inline data block elided: several thousand two-integer pairs completing the body of an np.array([[ ... ]]) literal (the pair listing is soft-wrapped mid-number in the source and contains a literal "\n" escape), followed by a second, commented-out assignment "#data = np.array([[ ... ]])" holding another pair listing of the same form ...]
[4,2],[4,1],[4,1],[4,2],[4,1],[4,2],[4,1],[4,1],[4,6],[5,2],[5,1],[5,2],[5,1],[5,7],[5,7],[5,1],[5,2],[5,1],[6,1],[6,1],[6,1],[6,2],[6,1],[6,1],[6,4],[6,1],[7,1],[7,1],[7,1],[7,3],[7,1],[7,1],[7,1],[8,1],[8,2],[8,3],[8,1],[8,1],[8,9],[8,6],[9,1],[9,3],[9,4],[10,4],[10,1],[10,3],[10,1],[10,19],[11,3],[11,2],[11,5],[11,5],[11,1],[12,7],[13,3],[13,4],[13,2],[13,4],[14,2],[16,1],[93,1],[22,2],[42,6],[15,1],[16,3],[36,8],[34,1],[30,3],[43,7],[46,8],[40,1],[22,1],[1,3],[1,1],[1,13],[1,2],[1,3],[1,2],[1,3],[1,1],[1,2],[1,2],[1,1],[1,2],[1,3],[1,1],[1,2],[1,1],[1,2],[1,1],[1,3],[1,2],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,2],[1,1],[1,5],[1,13],[1,3],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,6],[1,4],[1,1],[1,4],[1,1],[1,2],[1,3],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,6],[1,1],[1,1],[1,1],[1,1],[1,3],[1,2],[1,3],[1,2],[1,3],[1,1],[1,1],[1,3],[1,2],[1,3],[1,3],[1,2],[1,1],[1,3],[1,4],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,3],[1,4],[1,2],[1,2],[1,3],[1,7],[1,3],[1,1],[1,1],[1,3],[1,2],[1,1],[1,4],[1,5],[1,2],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,7],[1,6],[1,1],[1,2],[1,3],[1,3],[1,1],[1,4],[1,2],[1,7],[1,2],[1,5],[1,1],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,3],[1,6],[1,2],[1,2],[1,1],[1,1],[2,1],[2,1],[2,3],[2,1],[2,2],[2,1],[2,3],[2,1],[2,2],[2,12],[2,1],[2,1],[2,3],[2,3],[2,1],[2,2],[2,3],[2,3],[2,1],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,8],[2,2],[2,1],[2,2],[2,1],[2,1],[2,7],[2,1],[2,1],[2,1],[2,7],[2,2],[2,1],[2,18],[2,1],[2,1],[2,1],[2,2],[2,2],[2,1],[2,1],[2,5],[2,1],[2,1],[2,6],[2,3],[2,1],[3,3],[3,1],[3,1],[3,3],[3,1],[3,1],[3,3],[3,1],[3,2],[3,3],[3,1],[3,1],[3,1],[4,6],[4,1],[4,1],[4,3],[4,1],[4,1],[4,1],[4,2],[4,2],[4,5],[4,2],[4,2],[4,2],[4,2],[4,1],[4,3],[4,2],[4,1],[5,1],[5,3],[5,2],[5,2],[5,1],[5,1],[5,3],[5,1],[5,1],[5,2],[5,4],[5,4],[5,1],[6,2],[6,2],[6,2],[6,1],[6,1],[6,1],[6,1],[6,4],[6,1],[7,2],[7,1],[7,2],[7,1],[7,1],[7,1],[8,2],[8,2],[8,3],[8,14],[9,5],[9,2],[9,1],[9,1],[10,8],[10,2],[11,1],[11,1],[12,1],[12,1],[12,1],[12,7],[12,3],[48,1],[73,3],[22,2],[19,1],[20,1],[40,2],[15,2],[34,1],[22,5],[31,2],[47,28],[51,1],[19,2],[231,1],[15,3],[18,2],[18,3],[101,5],[65,2],[30,11],[18,3],[1,1],[1,2],[1,2],[1,1],[1,3],[1,5],[1,2],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,64],[1,2],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,3],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,4],[1,2],[1,1],[1,4],[1,5],[1,1],[1,1],[1,1],[1,1],[1,3],[1,4],[1,3],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,6],[1,1],[1,3],[1,4],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,2],[1,3],[1,2],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,2],[1,3],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,4],[1,3],[1,1],[1,1],[1,1],[1,1],[1,14],[1,1],[1,1],[1,1],[1,1],[1,2],[1,12],[1,2],[1,2],[1,1],[1,1],[1,3],[1,2],[1,3],[1,2],[1,1],[1,5],[1,1],[1,7],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,2],[1,3],[1,1],[2,2],[2,1],[2,3],[2,2],[2,1],[2,1],[2,2],[2,1],[2,2],[2,2],[2,1],[2,1],[2,10],[2,2],[2,1],[2,2],[2,3],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,3],[2,1],[2,6],[2,2],[2,4],[2,9],[2,2],[2,1],[2,3],[2,2],[2,10],[2,3],[2,1],[2,37],[2,2],[2,2],[2,2],[3,9],[3,4],[3,3],[3,2],[3,2],[3,1],[3,19],[3,1],[3,1],[3,1],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,2],[3,2]
,[3,10],[3,1],[3,1],[3,1],[3,1],[3,3],[3,6],[4,2],[4,5],[4,1],[4,3],[4,10],[4,1],[4,1],[4,1],[4,1],[4,4],[4,5],[4,1],[4,1],[4,2],[5,2],[5,2],[5,1],[5,2],[5,1],[5,3],[5,2],[5,1],[5,1],[6,3],[6,1],[6,1],[6,6],[6,1],[6,3],[7,2],[7,1],[7,1],[7,1],[7,1],[7,1],[8,1],[8,2],[8,1],[8,3],[8,1],[9,1],[9,1],[9,2],[10,3],[10,4],[10,1],[11,1],[12,1],[12,1],[13,1],[13,3],[13,1],[14,1],[35,2],[15,7],[32,1],[80,1],[22,2],[16,1],[25,1],[156,1],[175,2],[460,1],[63,1],[74,3],[121,2],[16,3],[49,5],[29,1],[16,1],[1,5],[1,4],[1,3],[1,5],[1,1],[1,1],[1,2],[1,2],[1,1],[1,3],[1,1],[1,2],[1,1],[1,3],[1,4],[1,12],[1,1],[1,3],[1,1],[1,2],[1,3],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,12],[1,1],[1,1],[1,3],[1,1],[1,2],[1,38],[1,1],[1,1],[1,1],[1,2],[1,5],[1,1],[1,1],[1,10],[1,3],[1,3],[1,4],[1,2],[1,2],[1,3],[1,1],[1,1],[1,1],[1,6],[1,1],[1,4],[1,2],[1,2],[1,1],[1,1],[1,9],[1,1],[1,1],[1,4],[1,4],[1,3],[1,3],[1,2],[1,1],[1,6],[1,2],[1,3],[1,1],[1,5],[1,2],[1,2],[1,1],[1,1],[1,5],[1,2],[1,1],[1,3],[1,1],[1,6],[1,1],[1,2],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,5],[1,2],[1,2],[1,8],[1,1],[1,3],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,4],[1,3],[1,1],[1,2],[1,2],[1,1],[1,3],[1,1],[1,1],[2,1],[2,1],[2,4],[2,7],[2,1],[2,3],[2,2],[2,3],[2,2],[2,10],[2,2],[2,6],[2,4],[2,2],[2,2],[2,1],[2,2],[2,1],[2,1],[2,1],[2,3],[2,4],[2,1],[2,1],[2,2],[2,2],[2,1],[2,2],[2,3],[2,1],[2,10],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,4],[2,1],[2,1],[2,1],[2,3],[2,2],[2,2],[3,5],[3,3],[3,26],[3,1],[3,4],[3,2],[3,5],[3,1],[3,3],[3,2],[3,1],[3,1],[3,2],[3,1],[3,2],[3,2],[3,1],[3,4],[3,2],[4,8],[4,1],[4,1],[4,1],[4,1],[4,2],[4,1],[4,2],[4,1],[4,5],[4,1],[4,2],[4,2],[4,2],[4,3],[4,2],[5,2],[5,1],[5,2],[5,3],[5,1],[5,1],[5,3],[5,1],[5,1],[5,1],[6,4],[6,2],[6,1],[6,1],[6,7],[6,2],[7,1],[7,1],[7,1],[7,3],[7,3],[7,3],[8,2],[8,1],[8,3],[9,3],[9,2],[9,1],[9,3],[9,2],[10,1],[10,1],[10,4],[11,2],[11,1],[11,1],[12,1],[12,55],[12,1],[13,1],[35,4],[21,9],[26,1],[165,7],[21,1],[55,5],[19,10],[18,5],[17,1],[67,1],[68,4],[19,1],[24,6],[89,3],[21,1],[40,1],[52,2],[16,1],[1,3],[1,4],[1,1],[1,4],[1,2],[1,3],[1,1],[1,3],[1,1],[1,4],[1,1],[1,1],[1,14],[1,5],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,22],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,4],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,5],[1,1],[1,2],[1,2],[1,5],[1,1],[1,4],[1,2],[1,1],[1,1],[1,1],[1,3],[1,1],[1,4],[1,1],[1,2],[1,37],[1,1],[1,2],[1,1],[1,2],[1,2],[1,5],[1,1],[1,1],[1,11],[1,2],[1,1],[1,1],[1,1],[1,7],[1,3],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,6],[1,2],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,3],[1,2],[1,2],[1,1],[1,1],[1,2],[1,3],[1,1],[1,4],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,3],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,2],[1,1],[1,11],[1,2],[1,1],[1,6],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,8],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,4],[1,1],[1,5],[1,2],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,3],[2,1],[2,1],[2,3],[2,1],[2,2],[2,1],[2,1],[2,1],[2,19],[2,6],[2,3],[2,1],[2,2],[2,3],[2,2],[2,6],[2,1],[2,1],[2,4],[2,1],[2,2],[2,1],[2,1],[2,1],[2,3],[2,2],[2,1],[2,7],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,2],[2,7],[2,1],[2,3],[2,3],[2,1],[3,6],[3,2],[3,2],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,3],[3,1],[3,1],[3,29],[3,1],[3,2],[3,3],[3,1],[3,1],[3,1],[3,15],[3,2],[3,1],[3,1],[3,2],[3,1],[3,2],[3,2],[3,7],[3,3],[3,4],[3,1],[4,2],[4,10],[4,1],[4,1],[4,1],[4,1],[4,1],[4,6],[5,3],
[5,2],[5,1],[5,4],[5,1],[5,2],[5,1],[6,13],[6,2],[6,2],[6,2],[6,1],[6,1],[6,1],[7,1],[7,1],[7,2],[8,1],[8,1],[8,1],[9,2],[9,1],[9,1],[9,1],[9,1],[9,1],[10,1],[10,1],[10,112],[10,1],[11,1],[11,3],[11,11],[12,1],[13,2],[13,1],[13,2],[14,1],[78,1],[43,1],[20,1],[15,1],[26,5],[17,2],[32,2],[93,2],[57,2],[25,1],[112,4],[18,1],[73,1],[30,55],[24,1],[699,1],[17,1],[1,1],[1,1],[1,3],[1,5],[1,1],[1,2],[1,1],[1,3],[1,2],[1,1],[1,1],[1,2],[1,3],[1,3],[1,1],[1,2],[1,2],[1,3],[1,1],[1,4],[1,5],[1,3],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,2],[1,2],[1,1],[1,2],[1,4],[1,1],[1,2],[1,1],[1,1],[1,6],[1,3],[1,4],[1,1],[1,2],[1,1],[1,1],[1,2],[1,4],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,4],[1,1],[1,1],[1,4],[1,4],[1,1],[1,3],[1,1],[1,1],[1,1],[1,9],[1,1],[1,2],[1,1],[1,1],[1,4],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,13],[1,2],[1,1],[1,1],[1,1],[1,7],[1,3],[1,3],[1,1],[1,1],[1,1],[1,2],[1,15],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,7],[1,3],[1,1],[1,1],[1,1],[1,5],[1,1],[1,3],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,6],[1,2],[1,4],[1,15],[1,2],[1,1],[1,1],[1,1],[1,2],[1,2],[1,2],[1,1],[1,2],[1,1],[2,1],[2,10],[2,3],[2,1],[2,1],[2,1],[2,3],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,4],[2,1],[2,1],[2,1],[2,2],[2,1],[2,24],[2,1],[2,2],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,2],[2,2],[2,5],[2,3],[2,2],[2,1],[2,2],[2,1],[2,1],[2,3],[2,4],[2,1],[3,2],[3,2],[3,1],[3,2],[3,1],[3,3],[3,1],[3,1],[3,1],[3,3],[3,13],[3,10],[3,7],[3,1],[3,1],[3,1],[3,9],[3,9],[3,1],[3,2],[3,11],[3,1],[3,4],[3,1],[3,1],[4,2],[4,1],[4,2],[4,1],[4,115],[4,1],[4,1],[4,1],[4,1],[4,2],[4,2],[4,1],[4,2],[4,4],[4,9],[4,1],[4,1],[5,1],[5,2],[5,3],[5,2],[5,1],[5,4],[5,1],[5,2],[5,1],[5,1],[5,1],[5,7],[5,1],[5,1],[6,39],[6,2],[6,3],[6,1],[7,1],[7,2],[7,3],[7,1],[7,2],[7,8],[7,1],[8,3],[8,1],[8,1],[8,1],[8,1],[9,3],[9,2],[9,1],[10,3],[10,25],[10,1],[10,1],[11,6],[11,1],[11,1],[11,1],[11,7],[12,1],[12,1],[12,1],[13,1],[13,1],[14,8],[14,1],[14,1],[74,2],[26,11],[69,1],[108,1],[20,5],[1263,1],[21,1],[16,1],[16,3],[32,2],[62,2],[50,1],[16,1],[15,1],[22,5],[1,2],[1,1],[1,2],[1,2],[1,1],[1,2],[1,1],[1,1],[1,6],[1,3],[1,1],[1,1],[1,3],[1,1],[1,1],[1,5],[1,10],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,7],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,4],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,9],[1,7],[1,9],[1,1],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,15],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,42],[1,12],[1,3],[1,3],[1,5],[1,2],[1,1],[1,5],[1,4],[1,3],[1,3],[1,4],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,1],[1,3],[1,1],[1,12],[1,1],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,2],[1,1],[1,3],[1,1],[1,5],[1,1],[1,16],[1,1],[1,7],[1,1],[1,1],[1,3],[1,1],[1,7],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,7],[1,1],[2,1],[2,3],[2,1],[2,1],[2,9],[2,2],[2,1],[2,1],[2,1],[2,1],[2,3],[2,1],[2,3],[2,2],[2,3],[2,1],[2,1],[2,1],[2,2],[2,1],[2,4],[2,2],[2,1],[2,10],[2,2],[2,1],[2,4],[2,1],[2,4],[2,3],[2,1],[2,1],[2,1],[2,1],[2,5],[2,1],[2,1],[2,1],[2,1],[2,2],[2,2],[2,1],[2,1],[2,4],[2,1],[2,2],[2,1],[3,1],[3,3],[3,135],[3,1],[3,10],[3,1],[3,1],[3,3],[3,2],[3,2],[3,2],[3,5],[3,1],[3,2],[3,7],[3,2],[3,1],[3,1],[3,3],[3,3],[3,1],[3,1],[3,1],[3,1],[3,3],[3,1],[4,91],[4,2],[4,2],[4,3],[4,10],[4,3],[4,2],[4,3],[4,1],[4,1],[4,32],[4,2],[4,2],[5,1],[5,1],[5,3],[5,1],[5,3],[5,2],[5,1],[5,34],[5,2],[5,7],[5,2],[5,1],[6,2],[6,1],[6,5],[6,2],[6,1],[6,1],[7,2],[7,2],[7,1],[7,1],[7,6],[7,1],[8,1],[8,2],[8,1],[8,5],[8,4],[8,1],[8,3],[8,1],[9,4],[9,7],[9,1],[11,2],[11,2]
,[11,1],[11,1],[11,2],[11,19],[11,6],[12,6],[13,2],[13,1],[13,1],[14,1],[76,1],[65,1],[15,2],[19,1],[15,1],[32,1],[33,1],[19,4],[27,3],[62,7],[36,2],[39,3],[44,3],[17,1],[940,4],[20,1],[16,5],[17,4],[21,1],[46,1],[55,1],[251,12],[27,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,12],[1,8],[1,1],[1,1],[1,5],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,9],[1,2],[1,5],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,3],[1,2],[1,1],[1,3],[1,2],[1,3],[1,1],[1,4],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,32],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,5],[1,1],[1,11],[1,4],[1,15],[1,3],[1,2],[1,1],[1,1],[1,1],[1,6],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,11],[1,9],[1,1],[1,2],[1,6],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,128],[1,3],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,2],[1,3],[1,2],[1,3],[1,1],[1,1],[1,1],[1,3],[1,2],[1,2],[1,2],[1,2],[1,1],[1,1],[1,2],[1,1],[1,17],[1,1],[1,1],[1,1],[1,3],[1,8],[2,1],[2,1],[2,3],[2,1],[2,3],[2,2],[2,4],[2,2],[2,1],[2,3],[2,1],[2,2],[2,1],[2,2],[2,2],[2,5],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,10],[2,1],[2,2],[2,1],[2,3],[2,1],[2,1],[2,2],[2,1],[2,1],[2,4],[2,1],[2,1],[2,2],[2,1],[2,3],[2,1],[2,1],[2,1],[3,1],[3,2],[3,1],[3,8],[3,1],[3,1],[3,1],[3,1],[3,1],[3,2],[3,1],[3,2],[3,3],[3,2],[3,3],[3,1],[3,1],[3,2],[3,1],[3,1],[4,1],[4,1],[4,1],[4,1],[4,2],[4,1],[4,1],[4,3],[4,1],[4,2],[4,2],[4,1],[4,1],[5,33],[5,5],[5,2],[5,1],[5,5],[5,48],[6,2],[6,3],[6,2],[6,1],[6,1],[6,2],[6,3],[6,1],[6,3],[7,8],[7,1],[7,1],[7,2],[8,1],[8,1],[8,1],[8,1],[8,2],[8,1],[9,1],[9,1],[9,1],[10,1],[10,1],[10,1],[11,2],[11,5],[12,1],[12,2],[12,2],[17,4],[17,1],[15,2],[29,5],[38,1],[20,1],[16,2],[24,1],[42,1],[29,1],[60,2],[20,1],[168,4],[17,33],[83,2],[71,1],[16,1],[18,3],[54,1],[15,8],[22,1],[36,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,1],[1,2],[1,7],[1,5],[1,1],[1,9],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,7],[1,3],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,15],[1,1],[1,3],[1,2],[1,2],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,5],[1,3],[1,2],[1,1],[1,143],[1,1],[1,1],[1,2],[1,4],[1,4],[1,2],[1,2],[1,96],[1,1],[1,4],[1,16],[1,2],[1,1],[1,3],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,8],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,4],[1,2],[1,1],[1,5],[1,2],[1,1],[1,1],[1,6],[1,1],[1,15],[1,1],[1,1],[1,3],[1,1],[1,2],[1,1],[1,1],[1,7],[1,1],[1,2],[1,4],[1,1],[1,6],[1,5],[1,6],[1,1],[1,1],[1,1303],[1,2],[1,2],[1,1],[1,5],[1,2],[1,2],[1,12],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,4],[1,1],[1,3],[1,8],[2,1],[2,1],[2,2],[2,3],[2,1],[2,3],[2,1],[2,1],[2,1],[2,5],[2,1],[2,2],[2,1],[2,1],[2,3],[2,1],[2,14],[2,1],[2,1],[2,1],[2,5],[2,1],[2,7],[2,3],[2,1],[2,3],[2,2],[2,3],[2,1],[2,1],[2,33],[2,1],[2,1],[2,1],[2,2],[2,3],[2,5],[2,1],[2,2],[2,8],[2,5],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[3,1],[3,2],[3,1],[3,1],[3,1],[3,3],[3,16],[3,1],[3,4],[3,1],[3,1],[3,8],[3,2],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,3],[3,1],[3,2],[3,1],[3,1],[3,2],[3,5],[3,6],[3,1],[3,1],[3,2],[3,3],[3,1],[3,1],[3,4],[3,1],[4,1],[4,2],[4,1],[4,1],[4,2],[4,1],[4,4],[4,2],[4,3],[4,1],[4,2],[4,2],[4,3],[4,1],[4,1],[4,1],[4,1],[4,45],[5,2],[5,1],[5,4],[5,2],[5,1],[5,1],[5,1],[5,1],[5,3],[5,1],[5,3],[6,5],[6,13],[6,4],[6,1],[6,2],[6,1],[6,2],[7,3],[7,1],[7,2],[7,1],[7,1],[8,1],[8,1],[8,1],[8,11],[8,4],[8,1],[8,1],[9,2],[9,1],[10,1],[10,1],[10,2],[11,25],[11,1],[11,1],[11,7],
[11,1],[12,3],[12,1],[12,1],[26,3],[29,11],[18,1],[20,1],[15,1],[16,1],[35,4],[15,1],[63,2],[39,1],[64,4],[15,1],[15,1],[26,1],[64,1],[40,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,12],[1,1],[1,1],[1,2],[1,2],[1,3],[1,2],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,10],[1,1],[1,1],[1,16],[1,1],[1,2],[1,47],[1,3],[1,1],[1,1],[1,1],[1,4],[1,1],[1,170],[1,2],[1,2],[1,1],[1,1],[1,3],[1,3],[1,1],[1,5],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,2],[1,1],[1,2],[1,1],[1,3],[1,1],[1,14],[1,35],[1,1],[1,3],[1,4],[1,2],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,2],[1,2],[1,4],[1,1],[1,2],[1,1],[1,1],[1,3],[1,2],[1,3],[1,2],[1,1],[1,1],[1,2],[1,1],[1,15],[1,13],[1,2],[1,1],[1,1],[1,8],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,5],[1,3],[1,1],[1,53],[1,1],[1,4],[1,3],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,14],[2,3],[2,1],[2,2],[2,3],[2,9],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,4],[2,8],[2,3],[2,1],[2,1],[2,3],[2,2],[2,1],[2,1],[2,1],[2,2],[2,4],[2,2],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,2],[2,2],[2,3],[2,1],[2,1],[2,4],[2,2],[2,161],[2,1],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,51],[3,1],[3,1],[3,3],[3,1],[3,3],[3,2],[3,1],[3,1],[3,2],[3,3],[3,4],[3,2],[3,2],[3,1],[3,1],[3,10],[3,1],[4,1],[4,1],[4,1],[4,4],[4,1],[4,1],[4,4],[4,1],[4,5],[4,9],[4,1],[4,3],[4,1],[5,4],[5,3],[5,1],[5,1],[5,1],[5,1],[5,1],[5,2],[5,1],[5,1],[5,1],[6,7],[6,1],[6,1],[6,1],[6,1],[6,1],[6,3],[6,2],[7,1],[7,2],[7,1],[7,1],[8,1],[8,2],[8,2],[9,1],[9,1],[10,3],[10,1],[10,1],[10,3],[11,9],[11,1],[11,1],[11,1],[11,1],[11,2],[11,2],[12,1],[12,4],[13,2],[13,2],[13,15],[14,1],[14,1],[17,3],[185,1],[51,1],[21,3],[19,3],[17,1],[29,1],[38,4],[169,24],[41,4],[15,1],[59,5],[87,3],[169,1],[29,5],[28,1],[25,4],[48,1],[15,3],[18,1],[22,2],[36,4],[134,1],[19,1],[15,1],[17,3],[56,1],[24,1],[17,1],[1,1],[1,3],[1,4],[1,3],[1,2],[1,3],[1,6],[1,4],[1,6],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,1],[1,4],[1,9],[1,79],[1,1],[1,4],[1,1],[1,3],[1,2],[1,1],[1,2],[1,1],[1,1],[1,7],[1,1],[1,3],[1,3],[1,2],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,3],[1,5],[1,4],[1,1],[1,2],[1,5],[1,2],[1,1],[1,10],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,7],[1,2],[1,1],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,24],[1,2],[1,1],[1,11],[1,2],[1,8],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,5],[1,4],[1,2],[1,2],[1,1],[1,3],[1,2],[1,1],[1,3],[1,1],[1,2],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,31],[1,1],[1,1],[1,6],[1,1],[1,1],[1,1],[1,1],[1,7],[1,1],[1,5],[1,1],[1,1],[1,2],[1,1],[1,3],[1,2],[1,1],[1,13],[1,5],[1,3],[1,2],[1,4],[1,2],[1,1],[1,2],[1,1],[1,1],[1,4],[1,3],[1,3],[1,1],[1,2],[1,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,1],[2,2],[2,5],[2,2],[2,8],[2,1],[2,1],[2,1],[2,3],[2,13],[2,6],[2,1],[2,4],[2,1],[2,2],[2,2],[2,1],[2,1],[2,4],[2,1],[2,1],[2,1],[2,6],[2,1],[2,1],[2,4],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,4],[2,6],[2,1],[2,1],[2,1],[2,1],[2,6],[2,1],[2,1],[2,1],[2,2],[2,2],[2,4],[3,1],[3,1],[3,2],[3,1],[3,5],[3,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,6],[3,1],[3,8],[3,1],[3,1],[3,1],[3,1],[3,13],[3,3],[3,1],[3,2],[3,2],[3,1],[4,4],[4,1],[4,1],[4,3],[4,1],[4,1],[4,1],[4,2],[5,4],[5,1],[5,2],[5,3],[5,1],[5,1],[5,1],[5,1],[5,2],[6,8],[7,1],[7,1],[7,2],[8,2],[8,2],[8,2],[8,3],[8,3],[8,1],[8,1],[9,1],[9,1],[10,1],[10,3],[10,1],[12,3],[12,2],[12,2],[12,1],[12,1],[12,1],[13,3],[13,1],[13,1],[14,1],[17,1]
,[25,7],[15,6],[111,8],[92,1],[26,21],[328,1],[16,1],[752,1],[16,1],[22,2],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,2],[1,2],[1,3],[1,6],[1,1],[1,1],[1,7],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,2],[1,7],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,7],[1,2],[1,1],[1,1],[1,1],[1,3],[1,2],[1,5],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,6],[1,1],[1,1],[1,4],[1,2],[1,3],[1,1],[1,3],[1,1],[1,2],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,8],[1,2],[1,2],[1,3],[1,2],[1,2],[1,3],[1,1],[1,3],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,6],[1,1],[1,1],[1,2],[1,2],[1,6],[1,1],[1,1],[1,8],[1,5],[1,1],[1,2],[1,4],[1,21],[1,1],[1,2],[1,2],[1,1],[1,1],[1,2],[1,2],[1,4],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,4],[1,2],[2,5],[2,1],[2,1],[2,4],[2,2],[2,1],[2,3],[2,1],[2,2],[2,8],[2,1],[2,2],[2,12],[2,2],[2,2],[2,1],[2,5],[2,2],[2,2],[2,1],[2,2],[2,1],[2,3],[2,4],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,1],[2,2],[2,4],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,4],[2,5],[2,1],[2,2],[2,2],[2,9],[2,1],[2,1],[3,3],[3,1],[3,1],[3,5],[3,1],[3,2],[3,3],[3,1],[3,12],[3,2],[3,1],[3,1],[3,3],[3,3],[3,1],[3,1],[3,1],[3,2],[3,1],[3,2],[3,1],[3,1],[3,7],[4,2],[4,2],[4,1],[4,3],[4,1],[4,1],[4,1],[4,1],[4,1],[4,1],[4,3],[4,1],[4,3],[5,1],[5,2],[5,1],[5,1],[5,1],[5,1],[6,1],[6,5],[6,11],[6,1],[6,1],[6,2],[6,1],[6,4],[6,1],[6,1],[7,5],[7,1],[7,1],[8,1],[8,3],[9,2],[9,1],[10,1],[11,1],[11,1],[11,2],[11,1],[12,4],[12,2],[13,1],[13,1],[13,2],[14,6],[14,1],[68,4],[113,4],[22,1],[48,79],[28,2],[88,1],[232,2],[23,1],[32,1],[72,2],[26,1],[20,1],[53,1],[16,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,8],[1,1],[1,1],[1,2],[1,2],[1,1],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,6],[1,1],[1,3],[1,1],[1,3],[1,4],[1,3],[1,1],[1,2],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,2],[1,2],[1,1],[1,5],[1,2],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,3],[1,1],[1,2],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,5],[1,4],[1,1],[1,1],[1,9],[1,6],[1,5],[1,1],[1,1],[1,3],[1,2],[1,9],[1,2],[1,3],[1,1],[1,4],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,6],[1,1],[1,1],[1,2],[1,1],[1,16],[1,3],[1,1],[1,86],[1,1],[1,2],[1,4],[1,2],[1,16],[1,9],[1,4],[1,2],[1,9],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,7],[1,10],[1,5],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,12],[1,2],[1,4],[1,1],[1,1],[1,2],[1,2],[1,4],[2,6],[2,3],[2,2],[2,1],[2,3],[2,2],[2,2],[2,2],[2,6],[2,1],[2,4],[2,2],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,3],[2,1],[2,1],[2,1],[2,3],[2,1],[2,2],[2,2],[2,1],[2,2],[2,9],[2,10],[2,1],[2,1],[2,1],[2,1],[2,1],[2,4],[2,3],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,8],[2,2],[2,1],[2,3],[2,1],[3,1],[3,1],[3,1],[3,2],[3,7],[3,5],[3,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,2],[3,1],[3,2],[3,2],[3,1],[3,1],[3,2],[3,1],[3,2],[3,5],[3,2],[4,1],[4,2],[4,1],[4,1],[4,1],[4,1],[4,1],[4,1],[4,1],[4,1],[4,6],[4,2],[5,5],[5,2],[5,9],[5,5],[5,1],[5,2],[5,1],[5,2],[6,7],[6,7],[7,3],[7,8],[7,1],[7,1],[7,2],[7,7],[8,1],[8,1],[8,1],[9,6],[9,4],[10,2],[10,1],[10,1],[10,3],[10,2],[11,1],[12,5],[12,3],[12,1],[13,1],[14,2],[14,3],[14,4],[30,1],[19,1],[27,1],[24,12],[20,24],[20,1],[80,1],[26,1],[25,1],[35,1],[150,1],[22,1],[28,1],[187,2],[15,2],[21,1],[22,1],[17,8],[27,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,4],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,6],[1,4],[1,1],[1,1],[1,2],[1,1],[1,2],[1,4],[1,4],[1,1],[1,3],[1,5],[1,1],[1,10],[1,8],[1,1],[1,3],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1
,1],[1,4],[1,1],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,3],[1,7],[1,3],[1,1],[1,10],[1,1],[1,4],[1,1],[1,1],[1,2],[1,7],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,4],[1,1],[1,2],[1,3],[1,1],[1,2],[1,2],[1,7],[1,1],[1,1],[1,1],[1,1],[1,5],[1,2],[1,1],[1,5],[1,1],[1,1],[1,5],[1,2],[1,2],[1,1],[1,3],[1,1],[1,1],[1,4],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,17],[1,4],[1,2],[1,6],[1,1],[1,2],[1,1],[1,2],[1,1],[1,6],[1,2],[1,1],[1,28],[1,3],[1,1],[1,3],[1,1],[1,2],[1,2],[1,2],[1,1],[1,3],[1,1],[2,1],[2,3],[2,1],[2,4],[2,1],[2,3],[2,2],[2,1],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,3],[2,1],[2,9],[2,1],[2,1],[2,7],[2,3],[2,1],[2,1],[2,3],[2,4],[2,2],[2,2],[2,2],[2,1],[2,3],[2,2],[2,3],[2,2],[2,1],[2,1],[2,2],[3,10],[3,1],[3,3],[3,4],[3,4],[3,398],[3,1],[3,1],[3,3],[3,1],[3,3],[3,1],[3,1],[3,3],[3,1],[3,1],[3,4],[3,3],[3,2],[3,1],[4,2],[4,16],[4,3],[4,2],[4,1],[4,4],[4,1],[4,1],[4,4],[4,1],[4,1],[4,1],[4,21],[4,5],[4,1],[4,3],[4,2],[4,2],[4,1],[4,2],[4,1],[4,2],[5,3],[5,1],[5,3],[5,1],[5,5],[5,7],[5,1],[5,1],[5,1],[5,7],[5,4],[5,6],[5,1],[6,1],[6,2],[6,3],[6,2],[6,1],[6,3],[7,8],[7,6],[7,1],[7,2],[7,1],[7,1],[8,4],[8,1],[8,4],[8,1],[8,1],[8,8],[8,3],[9,1],[9,1],[9,2],[10,6],[11,1],[11,1],[11,1],[12,1],[12,4],[12,6],[13,3],[13,1],[520,3],[292,13],[16,1],[20,1],[44,3],[22,1],[17,2],[18,1],[46,5],[19,1],[15,3],[28,1],[23,1],[19,13],[25,2],[23,134],[68,1],[79,13],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,1],[1,5],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,5],[1,1],[1,1],[1,3],[1,1],[1,2],[1,6],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,5],[1,12],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,2],[1,6],[1,1],[1,1],[1,36],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,5],[1,1],[1,5],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,6],[1,3],[1,2],[1,2],[1,3],[1,1],[1,1],[1,3],[1,1],[1,1],[1,4],[1,2],[1,1],[1,22],[1,1],[1,1],[1,1],[1,187],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,2],[1,5],[1,4],[1,1],[1,2],[1,1],[1,20],[1,4],[1,2],[1,1],[1,1],[1,3],[1,1],[1,3],[1,1],[1,1],[2,1],[2,5],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,2],[2,1],[2,2],[2,1],[2,1],[2,1],[2,5],[2,1],[2,2],[2,1],[2,1],[2,6],[2,6],[2,9],[2,1],[2,2],[2,1],[2,2],[2,2],[2,3],[2,6],[2,2],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,44],[2,1],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[3,9],[3,4],[3,1],[3,2],[3,1],[3,1],[3,1],[3,4],[3,2],[3,1],[3,1],[3,21],[3,6],[3,1],[3,2],[3,1],[3,1],[3,1],[3,2],[3,1],[3,2],[3,3],[3,1],[3,3],[3,5],[3,1],[3,1],[3,5],[3,1],[3,2],[3,2],[3,1],[3,1],[3,1],[4,92],[4,1],[4,1],[4,1],[4,13],[4,4],[4,1],[4,1],[4,2],[4,1],[4,1],[5,1],[5,1],[5,1],[5,2],[5,1],[5,3],[5,3],[5,1],[5,1],[5,1],[5,4],[5,1],[6,1],[6,3],[6,2],[6,23],[6,2],[6,3],[6,35],[7,1],[7,1],[7,1],[8,690],[8,1],[8,3],[9,2],[9,5],[9,1],[10,4],[11,6],[12,4],[12,1],[14,15],[14,1],[18,1],[46,1],[16,1],[24,4],[27,2],[21,1],[98,1],[107,3],[44,16],[16,1],[28,1],[1,1],[1,2],[1,7],[1,3],[1,1],[1,1],[1,2],[1,2],[1,14],[1,1],[1,1],[1,1],[1,36],[1,1],[1,3],[1,4],[1,1],[1,3],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,13],[1,51],[1,1],[1,1],[1,3],[1,1],[1,3],[1,1],[1,6],[1,2],[1,2],[1,1],[1,3],[1,1],[1,5],[1,3],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,2],[1,4],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,94],[1,6],[1,1],[1,1],[1,1],[1,2],[1,4],[1,5],[1,1],[1,1],[1,2],[1,2],[1,2],[1,1],[1,5],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,3],[1,2],[1,2],[1,1],[1,2],[1,3],[1,1],[1,1],[1,1],[1,2],[1
,1],[1,1],[1,5],[1,2],[1,1],[1,2],[1,2],[1,5],[1,1],[1,2],[1,1],[1,2],[1,2],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,4],[1,4],[1,1],[1,28],[1,1],[1,2],[1,3],[1,2],[1,1],[1,1],[1,10],[1,4],[1,4],[1,2],[1,1],[1,3],[1,3],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,3],[1,5],[1,7],[2,1],[2,5],[2,1],[2,3],[2,2],[2,1],[2,2],[2,2],[2,2],[2,1],[2,1],[2,1],[2,2],[2,2],[2,1],[2,1],[2,2],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,1],[2,1],[2,7],[2,7],[2,2],[2,4],[2,3],[2,1],[2,2],[2,2],[2,1],[2,1],[2,1],[2,4],[2,1],[2,1],[2,2],[2,5],[2,1],[2,1],[2,1],[2,2],[2,2],[2,2],[2,1],[2,1],[2,1],[2,1],[3,1],[3,1],[3,2],[3,2],[3,1],[3,1],[3,5],[3,5],[3,1],[3,1],[3,10],[3,30],[3,1],[3,1],[3,1],[3,3],[3,1],[3,4],[3,3],[3,3],[3,1],[3,1],[3,2],[3,1],[3,92],[3,1],[4,4],[4,1],[4,2],[4,5],[4,1],[4,2],[4,2],[4,1],[4,4],[4,1],[4,1],[4,1],[5,1],[5,2],[5,1],[5,1],[5,1],[5,4],[5,2],[5,1],[5,10],[6,2],[6,1],[6,1],[6,1],[6,4],[6,2],[6,1],[6,1],[6,2],[7,1],[7,1],[7,1],[7,1],[7,2],[7,1],[7,1],[8,5],[8,1],[8,1],[8,5],[8,5],[8,1],[9,2],[9,1],[9,4],[9,4],[10,1],[10,1],[10,5],[10,5],[10,1],[10,1],[11,1],[11,1],[11,1],[11,2],[12,1],[12,2],[12,2],[12,1],[13,1],[13,1],[13,3],[14,1],[14,22],[14,1],[14,1],[14,2],[20,4],[27,1],[18,2],[49,1],[16,3],[15,1],[18,1],[15,1],[18,1],[15,1],[27,2],[21,1],[23,1],[54,1],[22,1],[46,1],[17,1],[37,7],[17,1],[19,1],[33,2],[62,1],[18,4],[18,1],[24,1],[18,1],[36,1],[20,1],[125,1],[18,13],[36,1],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,3],[1,4],[1,3],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,10],[1,6],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,2],[1,4],[1,1],[1,3],[1,8],[1,2],[1,4],[1,10],[1,1],[1,71],[1,1],[1,2],[1,18],[1,1],[1,3],[1,2],[1,1],[1,1],[1,2],[1,2],[1,1],[1,34],[1,9],[1,2],[1,7],[1,3],[1,3],[1,3],[1,3],[1,4],[1,1],[1,1],[1,1],[1,2],[1,1],[1,4],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,6],[1,1],[1,1],[1,8],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,6],[1,3],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,2],[1,9],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,6],[1,1],[1,10],[1,1],[1,10],[1,1],[1,2],[1,2],[1,2],[1,3],[1,1],[1,2],[1,3],[1,2],[1,2],[1,20],[1,2],[1,3],[1,2],[1,1],[1,1],[1,5],[1,1],[1,5],[1,1],[1,1],[1,1],[1,4],[1,1],[1,2],[2,1],[2,1],[2,3],[2,3],[2,2],[2,2],[2,1],[2,2],[2,3],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,10],[2,1],[2,1],[2,6],[2,3],[2,5],[2,3],[2,1],[2,1],[2,11],[2,2],[2,3],[2,2],[2,1],[2,7],[2,1],[2,1],[2,2],[2,1],[2,1],[2,2],[2,2],[2,1],[2,3],[2,1],[2,3],[2,2],[2,1],[2,6],[2,3],[2,1],[2,1],[2,1],[3,4],[3,2],[3,1],[3,8],[3,1],[3,49],[3,2],[3,2],[3,3],[3,1],[3,2],[3,5],[3,3],[3,2],[3,1],[3,3],[3,1],[3,2],[3,13],[3,7],[3,2],[3,1],[4,2],[4,4],[4,1],[4,2],[4,1],[4,1],[4,1],[4,2],[5,1],[5,4],[5,1],[5,1],[5,1],[5,1],[5,1],[5,4],[5,1],[5,2],[6,1],[6,7],[6,1],[6,1],[6,4],[6,2],[6,3],[6,1],[6,9],[7,1],[7,1],[8,3],[8,7],[8,1],[8,2],[8,2],[8,2],[8,8],[8,1],[9,1],[9,1],[9,1],[9,2],[10,1],[11,3],[12,1],[12,1],[12,2],[12,1],[12,3],[13,1],[14,1],[58,1],[21,1],[36,15],[218,1],[34,1],[20,2],[16,2],[28,1],[38,1],[38,3],[16,1],[165,2],[132,1],[19,2],[260,1],[39,2],[64,1],[18,1],[1,1],[1,1],[1,1],[1,12],[1,1],[1,2],[1,1],[1,5],[1,2],[1,2],[1,1],[1,2],[1,1],[1,13],[1,1],[1,2],[1,3],[1,1],[1,1],[1,2],[1,1],[1,2],[1,4],[1,2],[1,5],[1,1],[1,3],[1,2],[1,1],[1,2],[1,6],[1,1],[1,2],[1,2],[1,7],[1,1],[1,1],[1,1],[1,1],[1,1],[1,6],[1,1],[1,1],[1,1],[1,3],[1,6],[1,1],[1,1],[1,1],[1,6],[1,3],[1,2],[1,6],[1,2],[1,1],[1,3],[1,1],[1,2],[1,1],[1,1],[1,2],[1,3]
,[1,1],[1,3],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,6],[1,1],[1,2],[1,63],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,2],[1,2],[1,1],[1,2],[1,1],[1,1],[1,4],[1,1],[1,2],[1,3],[1,9],[1,2],[1,1],[1,2],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,10],[1,1],[1,2],[1,1],[1,2],[1,2],[1,7],[1,1],[1,8],[1,1],[1,3],[1,5],[1,1],[1,1],[1,1],[1,1],[1,15],[1,6],[1,1],[1,1],[1,422],[1,2],[1,2],[1,4],[1,2],[1,2],[1,3],[1,2],[1,3],[1,1],[1,5],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[2,4],[2,3],[2,1],[2,2],[2,2],[2,3],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,2],[2,2],[2,2],[2,13],[2,11],[2,4],[2,1],[2,2],[2,10],[2,5],[2,2],[2,75],[2,3],[2,1],[2,8],[2,4],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,14],[2,2],[2,15],[2,1],[2,2],[2,4],[2,1],[2,1],[2,2],[2,33],[2,2],[2,1],[2,1],[2,3],[2,2],[2,2],[2,1],[3,1],[3,13],[3,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,6],[3,7],[3,2],[3,1],[3,3],[3,1],[3,1],[3,1],[3,1],[3,1],[3,2],[3,3],[3,3],[3,2],[3,1],[3,6],[3,2],[3,4],[3,2],[4,4],[4,4],[4,4],[4,4],[4,6],[4,1],[4,1],[4,1],[4,3],[4,1],[4,2],[4,5],[4,1],[5,4],[5,1],[5,2],[5,8],[5,3],[5,1],[5,1],[5,1],[5,1],[5,3],[6,1],[6,3],[6,2],[6,4],[6,1],[6,3],[6,1],[6,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,3],[8,1],[8,1],[8,1],[8,7],[9,2],[10,2],[10,1],[10,6],[11,1],[11,3],[11,2],[12,1],[12,1],[14,2],[14,6],[17,2],[19,1],[15,1],[112,1],[16,1],[30,6],[19,3],[15,4],[19,2],[25,1],[17,4],[49,1],[48,1],[26,1],[17,9],[43,3],[51,6],[17,1],[21,3],[26,4],[31,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,9],[1,1],[1,753],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,5],[1,1],[1,1],[1,1],[1,1],[1,1],[1,7],[1,2],[1,6],[1,3],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,3],[1,4],[1,3],[1,4],[1,1],[1,2],[1,1],[1,6],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,2],[1,1],[1,3],[1,3],[1,3],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,2],[1,1],[1,26],[1,3],[1,1],[1,1],[1,4],[1,1],[1,1],[1,5],[1,2],[1,3],[1,1],[1,5],[1,2],[1,2],[1,2],[1,2],[1,1],[1,3],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,3],[1,1],[1,4],[1,8],[1,10],[1,1],[1,2],[1,6],[1,1],[1,2],[1,2],[1,2],[1,6],[1,1],[1,1],[1,15],[1,2],[2,1],[2,12],[2,1],[2,8],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,20],[2,2],[2,2],[2,1],[2,1],[2,2],[2,2],[2,1],[2,2],[2,1],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,14],[2,2],[2,1],[2,5],[2,5],[2,1],[2,2],[2,2],[2,6],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[3,2],[3,3],[3,3],[3,1],[3,1],[3,1],[3,3],[3,1],[3,1],[3,6],[3,8],[3,1],[3,1],[3,1],[3,3],[3,12],[3,1],[3,1],[3,1],[3,1],[3,6],[3,1],[3,2],[3,1],[3,1],[4,5],[4,1],[4,5],[4,5],[4,29],[4,11],[4,1],[4,1],[4,2],[4,1],[4,1],[5,2],[5,4],[5,1],[5,6],[5,1],[5,1],[5,1],[5,1],[6,1],[6,4],[6,1],[6,4],[6,2],[6,2],[6,1],[6,1],[6,2],[6,1],[7,1],[7,2],[7,1],[7,1],[7,2],[8,3],[8,4],[8,5],[8,7],[8,5],[9,5],[9,1],[9,1],[10,2],[10,2],[10,4],[11,1],[11,1],[12,8],[12,1],[12,1],[13,1],[13,1],[13,2],[14,2],[20,4],[18,3],[65,1],[23,1],[20,3],[237,1],[70,5],[80,2],[71,1],[15,4],[18,8],[54,1],[30,1],[15,2],[26,2],[20,1],[17,1],[26,4],[20,13],[1,2],[1,1],[1,3],[1,1],[1,3],[1,5],[1,3],[1,1],[1,5],[1,1],[1,3],[1,7],[1,2],[1,1],[1,1],[1,1],[1,4],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,2],[1,11],[1,1],[1,6],[1,4],[1,3],[1,3],[1,2],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,3],[1,1],[1,2],[1,7],[1
,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,5],[1,2],[1,1],[1,1],[1,4],[1,1],[1,10],[1,4],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,3],[1,2],[1,2],[1,1],[1,4],[1,1],[1,1],[1,1],[1,3],[1,2],[1,1],[1,2],[1,3],[1,1],[1,2],[1,1],[1,4],[1,1],[1,8],[1,1],[1,1],[1,2],[1,4],[1,1],[1,34],[1,2],[1,2],[1,1],[1,1],[1,4],[1,1],[1,3],[1,7],[1,4],[1,7],[1,7],[1,1],[1,3],[1,1],[1,1],[1,3],[1,4],[1,2],[1,1],[1,1],[1,1],[1,1],[1,14],[1,6],[1,6],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[2,2],[2,1],[2,1],[2,4],[2,2],[2,2],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,2],[2,1],[2,4],[2,1],[2,1],[2,1],[2,1],[2,4],[2,2],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,2],[2,1],[2,2],[2,6],[2,1],[2,1],[2,1],[2,2],[2,2],[3,3],[3,7],[3,4],[3,2],[3,3],[3,1],[3,1],[3,4],[3,1],[3,14],[3,2],[3,5],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,9],[3,25],[3,1],[3,1],[4,1],[4,9],[4,1],[4,3],[4,1],[4,1],[4,12],[4,1],[4,3],[4,7],[4,2],[4,1],[4,1],[4,1],[4,1],[4,1],[5,5],[5,2],[5,1],[5,1],[5,2],[5,5],[5,1],[5,1],[5,1],[5,1],[5,1],[6,5],[6,1],[6,3],[6,1],[6,4],[6,1],[6,1],[6,3],[6,2],[6,1],[7,1],[7,1],[7,1],[7,1],[7,1],[8,2],[8,1],[8,1],[8,1],[8,1],[9,2],[10,374],[10,3],[11,1],[11,1],[11,3],[11,8],[11,4],[12,1],[13,3],[13,2],[13,4],[58,1],[43,1],[38,1],[196,1],[55,3],[15,1],[79,1],[16,5],[20,1],[32,1],[111,1],[68,1],[50,17],[327,47],[46,3],[24,3],[41,2],[65,1],[1,2],[1,14],[1,4],[1,1],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,7],[1,4],[1,5],[1,8],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,6],[1,2],[1,1],[1,5],[1,1],[1,3],[1,29],[1,4],[1,2],[1,1],[1,1],[1,4],[1,2],[1,9],[1,5],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,4],[1,2],[1,1],[1,8],[1,2],[1,13],[1,1],[1,1],[1,1],[1,2],[1,2],[1,2],[1,4],[1,6],[1,1],[1,1],[1,3],[1,2],[1,4],[1,2],[1,10],[1,2],[1,2],[1,2],[1,1],[1,4],[1,2],[1,1],[1,5],[1,93],[1,1],[1,1],[1,3],[1,22],[1,1],[1,1],[1,4],[1,2],[1,2],[1,1],[1,1],[1,4],[1,1],[1,6],[1,1],[1,3],[1,4],[1,1],[1,1],[1,2],[1,2],[1,8],[1,3],[1,1],[1,5],[1,6],[1,2],[1,2],[1,1],[1,1],[1,3],[1,1],[1,3],[1,2],[1,1],[1,2],[1,2],[1,2],[1,28],[1,1],[1,6],[1,6],[1,2],[2,1],[2,2],[2,1],[2,2],[2,1],[2,2],[2,6],[2,1],[2,1],[2,2],[2,6],[2,2],[2,2],[2,1],[2,2],[2,2],[2,2],[2,1],[2,2],[2,2],[2,6],[2,3],[2,3],[2,1],[2,2],[2,2],[2,1],[2,1],[2,14],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,9],[2,2],[2,1],[2,5],[2,1],[2,1],[2,3],[2,2],[2,2],[2,7],[2,16],[2,6],[2,2],[2,2],[2,1],[2,2],[3,1],[3,26],[3,1],[3,2],[3,1],[3,1],[3,3],[3,1],[3,3],[3,1],[3,1],[3,4],[3,1],[3,3],[3,3],[3,1],[3,1],[3,1],[3,1],[3,1],[3,12],[3,2],[3,2],[3,4],[3,1],[3,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[4,1],[4,1],[4,2],[4,1],[4,1],[4,2],[4,1],[4,1],[4,1],[4,2],[4,1],[4,8],[4,3],[4,1],[4,4],[5,2],[5,2],[5,1],[5,1],[5,1],[5,9],[6,1],[6,2],[6,2],[6,1],[6,1],[6,1],[6,10],[6,1],[7,1],[7,11],[7,4],[7,1],[7,2],[8,2],[8,1],[8,1],[8,1],[8,1],[8,4],[8,7],[9,1],[9,1],[10,2],[10,4],[10,1],[10,1],[11,6],[12,1],[12,1],[12,6],[13,1],[13,5],[13,2],[13,11],[14,8],[14,3],[16,1],[55,1],[17,1],[91,1],[27,1],[16,1],[17,1],[37,1],[54,3],[73,2],[50,1],[19,3],[20,2],[26,1],[55,3],[54,1],[31,1],[68,2],[75,8],[412,1],[21,2],[1,6],[1,1],[1,2],[1,2],[1,4],[1,4],[1,2],[1,6],[1,5],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,9],[1,4],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,6],[1,3],[1,1],[1,2],[1,3],[1,12],[1,16],[1,3],[1,1],[1,1],[1,3],[1,3],[1,502],[1,3],[1,1],[1,1],[1,5],[1,2],[1,2],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,6],[1,3],[1,2],[1,1],[1,5],[1,1],[1,6],[1,4],[1,1]
,[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,2],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,1],[1,17],[1,3],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,4],[1,6],[1,1],[1,1],[1,11],[1,1],[1,4],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,3],[1,1],[1,1],[1,2],[1,3],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,5],[1,2],[2,1],[2,1],[2,3],[2,3],[2,2],[2,2],[2,9],[2,2],[2,1],[2,9],[2,1],[2,2],[2,2],[2,2],[2,5],[2,5],[2,2],[2,1],[2,2],[2,1],[2,1],[2,13],[2,5],[2,2],[2,1],[2,4],[2,1],[2,1],[2,2],[2,1],[2,2],[2,3],[2,3],[2,5],[2,3],[2,3],[2,10],[2,2],[2,2],[2,2],[2,4],[2,1],[2,2],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,3],[3,2],[3,2],[3,1],[3,7],[3,2],[3,2],[3,1],[3,5],[3,2],[3,3],[3,1],[3,8],[3,1],[3,1],[3,2],[3,14],[3,2],[4,2],[4,1],[4,2],[4,3],[4,2],[4,7],[4,1],[4,5],[4,1],[4,3],[4,10],[4,1],[4,2],[4,4],[4,4],[4,1],[5,1],[5,4],[5,2],[5,1],[5,1],[5,2],[5,8],[5,3],[5,1],[5,1],[6,2],[6,2],[6,1],[6,1],[6,1],[6,2],[6,15],[6,39],[6,3],[7,2],[7,1],[7,3],[7,1],[7,1],[8,1],[8,1],[9,2],[9,2],[9,1],[9,1],[10,1],[10,1],[10,1],[11,14],[11,1],[11,3],[11,1],[12,1],[12,1],[13,2],[13,2],[14,8],[16,1],[27,1],[21,5],[18,2],[36,1],[36,3],[28,15],[17,13],[18,7],[17,9],[28,2],[19,2],[27,1],[33,11],[40,2],[17,3],[120,2],[136,4],[21,1],[64,1],[23,3],[81,4],[27,1],[126,15],[17,1],[37,2],[21,1],[22,1],[58,1],[1,85],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,2],[1,1],[1,2],[1,3],[1,9],[1,2],[1,3],[1,7],[1,3],[1,2],[1,5],[1,2],[1,1],[1,3],[1,1],[1,1],[1,4],[1,13],[1,74],[1,14],[1,1],[1,1],[1,2],[1,1],[1,2],[1,4],[1,2],[1,5],[1,1],[1,4],[1,1],[1,4],[1,1],[1,1],[1,3],[1,2],[1,79],[1,1],[1,1],[1,6],[1,1],[1,2],[1,7],[1,2],[1,1],[1,2],[1,1],[1,7],[1,1],[1,2],[1,1],[1,4],[1,4],[1,3],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,2],[1,6],[1,1],[1,8],[1,2],[1,2],[1,1],[1,9],[1,1],[1,2],[1,1],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,11],[1,1],[1,5],[1,1],[1,4],[1,3],[1,8],[1,4],[1,1],[1,9],[1,1],[1,3],[1,1],[1,4],[1,1],[1,2],[1,3],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,2],[1,3],[1,8],[1,4],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,11],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[2,6],[2,1],[2,3],[2,1],[2,3],[2,7],[2,6],[2,1],[2,2],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,2],[2,1],[2,2],[2,2],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,2],[2,2],[2,1],[2,4],[2,3],[2,2],[2,1],[2,6],[2,1],[2,3],[2,2],[2,2],[2,1],[2,3],[2,1],[2,2],[2,1],[2,1],[2,1],[2,3],[2,2],[2,1],[2,4],[2,5],[2,1],[2,1],[3,1],[3,57],[3,2],[3,1],[3,1],[3,2],[3,3],[3,15],[3,4],[3,1],[3,1],[3,9],[3,10],[3,5],[3,1],[3,4],[3,4],[3,1],[3,1],[3,6],[3,1],[4,2],[4,1],[4,1],[4,2],[4,1],[4,14],[4,3],[4,1],[4,1],[4,3],[4,10],[4,1],[4,2],[5,10],[5,1],[5,1],[5,3],[5,1],[5,5],[5,1],[6,5],[6,4],[6,2],[6,2],[6,3],[6,1],[7,1],[7,1],[7,4],[7,1],[7,2],[7,2],[7,2],[7,2],[8,2],[8,1],[8,4],[8,2],[8,4],[8,1],[9,1],[9,1],[10,3],[10,1],[11,1],[11,1],[12,9],[12,4],[12,2],[13,7],[13,4],[13,2],[13,7],[13,1],[14,1],[14,1],[23,1],[19,2],[16,1],[36,4],[15,4],[22,3],[17,1],[17,2],[38,2],[15,1],[34,1],[29,2],[20,7],[23,4],[44,5],[22,2],[18,1],[1,2],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,3],[1,4],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,9],[1,1],[1,4],[1,2],[1,2],[1,1],[1,5],[1,1],[1,2],[1,1],[1,4],[1,2],[1,2],[1,1],[1,3],[1,3],[1,3],[1,2],[1,3],[1,1],[1,2],[1,5],[1,3],[1,1],[1,4],[1,1],[1,6],[1,4],[1,3],[1,1],[1,2],[1,1],[1,2],[1,2],[1,6],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,3],[1,8],[1,1],[1,
2],[1,5],[1,1],[1,6],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,3],[1,10],[1,3],[1,7],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,3],[1,3],[1,2],[1,2],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,1],[1,1],[1,2],[1,1],[1,43],[1,23],[1,2],[1,4],[1,33],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,7],[1,2],[1,4],[1,6],[1,1],[1,1],[1,1],[1,2],[1,7],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,136],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,3],[1,2],[1,1],[1,1],[1,1],[1,20],[2,1],[2,1],[2,16],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,3],[2,2],[2,1],[2,1],[2,2],[2,7],[2,2],[2,1],[2,2],[2,114],[2,1],[2,3],[2,4],[2,1],[2,4],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[2,6],[2,2],[2,1],[2,1],[2,1],[2,4],[2,1],[2,1],[2,1],[2,4],[2,1],[2,1],[2,1],[2,4],[2,2],[2,4],[2,3],[2,2],[2,1],[3,2],[3,1],[3,1],[3,5],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,8],[3,2],[3,1],[3,2],[3,28],[3,1],[3,118],[3,1],[3,1],[3,2],[3,2],[3,3],[3,8],[3,3],[4,1],[4,2],[4,4],[4,1],[4,1],[4,1],[4,1],[4,1],[4,2],[4,2],[4,1],[4,1],[4,3],[4,1],[4,3],[4,1],[4,1],[4,1],[5,2],[5,1],[5,6],[5,1],[5,4],[5,2],[5,4],[5,1],[5,4],[6,4],[6,1],[6,3],[6,1],[6,2],[6,1],[7,1],[7,3],[7,1],[7,46],[7,2],[7,1],[8,3],[8,6],[8,1],[8,5],[9,12],[9,1],[9,5],[10,3],[10,3],[11,3],[11,7],[12,3],[12,1],[12,1],[13,1],[13,1],[13,2],[13,13],[13,1],[14,1],[14,1],[58,2],[112,1],[18,3],[19,1],[20,1],[18,1],[15,2],[92,1],[50,1],[40,1],[57,5],[19,2],[19,1],[15,4],[16,5],[54,1],[15,1],[1,2],[1,6],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,6],[1,7],[1,1],[1,2],[1,2],[1,3],[1,1],[1,1],[1,1],[1,1],[1,11],[1,3],[1,6],[1,1],[1,1],[1,6],[1,4],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,12],[1,1],[1,1],[1,1],[1,4],[1,1],[1,4],[1,1],[1,1],[1,1],[1,2],[1,1],[1,2],[1,5],[1,2],[1,1],[1,1],[1,2],[1,8],[1,2],[1,1],[1,1],[1,2],[1,1],[1,19],[1,1],[1,1],[1,4],[1,1],[1,4],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,4],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,3],[1,5],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,1],[1,2],[1,4],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,5],[1,1],[1,2],[1,3],[1,9],[1,26],[1,3],[1,17],[1,1],[1,2],[1,1],[1,5],[1,4],[1,1],[1,1],[1,2],[1,1],[1,3],[1,2],[1,8],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,1],[1,4],[1,30],[2,1],[2,4],[2,1],[2,2],[2,1],[2,1],[2,2],[2,3],[2,4],[2,2],[2,1],[2,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,2],[2,7],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,10],[2,4],[2,1],[2,1],[2,1],[2,3],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,3],[2,3],[2,7],[2,1],[2,1],[2,2],[2,5],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,4],[2,2],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,1],[3,3],[3,1],[3,3],[3,1],[3,1],[3,1],[3,2],[3,29],[3,2],[4,2],[4,1],[4,3],[4,1],[4,1],[4,1],[4,1],[4,1],[4,2],[4,1],[4,3],[4,1],[5,2],[5,1],[5,1],[5,4],[5,1],[5,1],[5,2],[5,1],[5,1],[5,3],[6,4],[6,1],[6,1],[6,3],[6,2],[6,2],[6,1],[6,1],[6,1],[6,2],[7,2],[7,3],[7,2],[7,1],[7,2],[8,1],[8,1],[8,4],[8,1],[8,3],[9,1],[9,5],[9,1],[9,1],[9,1],[11,1],[11,2],[11,2],[11,3],[12,7],[12,1],[13,1],[14,2],[16,1],[78,3],[17,3],[27,3],[19,2],[67,3],[16,3],[58,3],[17,1],[29,2],[29,1],[23,1],[390,2],[75,2],[26,8],[20,3],[19,2],[16,4],[33,1],[66,2],[20,1],[17,5],[1,1],[1,2],[1,1],[1,1],[1,9],[1,4],[1,2],[1,3],[1,2],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,1],[1,1],[1,2],[1,1],[1,4],[1,2],[1,1],[1,1],[1,1],[1,4],[1,5],[1,11],[1,1],[1,4],[1,2],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,3],[1,4],[1,1],[1,2],[1
,3],[1,1],[1,1],[1,3],[1,1],[1,7],[1,1],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,8],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,6],[1,1],[1,1],[1,6],[1,2],[1,1],[1,11],[1,3],[1,1],[1,2],[1,4],[1,4],[1,1],[1,11],[1,7],[1,3],[1,1],[1,1],[1,3],[1,1],[1,1],[1,2],[1,2],[1,1],[1,1],[1,14],[1,1],[1,1],[1,1],[1,4],[1,1],[1,2],[1,3],[1,6],[1,1],[1,1],[1,3],[1,3],[1,2],[1,2],[1,7],[1,5],[1,2],[1,7],[1,7],[1,1],[1,3],[1,2],[1,4],[1,4],[1,3],[1,1],[1,1],[1,4],[1,2],[1,1],[1,1],[1,5],[1,3],[1,1],[1,124],[1,2],[1,6],[1,1],[1,1],[2,1],[2,4],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,2],[2,5],[2,21],[2,2],[2,1],[2,2],[2,1],[2,2],[2,1],[2,1],[2,7],[2,31],[2,1],[2,2],[2,4],[2,1],[2,3],[2,125],[2,1],[2,8],[2,1],[2,4],[2,2],[2,2],[2,1],[2,1],[2,1],[2,4],[2,5],[2,1],[2,2],[2,2],[2,1],[2,1],[2,1],[2,8],[2,1],[2,12],[2,278],[2,1],[2,1],[2,1],[2,1],[2,2],[2,1],[2,1],[3,1],[3,2],[3,1],[3,1],[3,1],[3,2],[3,3],[3,1],[3,1],[3,1],[3,1],[3,3],[3,2],[3,1],[3,1],[3,3],[3,1],[3,3],[3,1],[3,3],[3,1],[3,2],[3,3],[3,1],[4,2],[4,8],[4,1],[4,3],[4,3],[4,1],[4,3],[4,1],[4,1],[4,1],[4,1],[4,1],[4,1],[4,2],[4,1],[4,3],[5,1],[5,1],[5,1],[5,2],[5,2],[5,2],[5,1],[6,2],[6,2],[6,24],[6,2],[6,2],[6,20],[6,1],[6,1],[6,3],[6,1],[6,4],[6,5],[6,3],[7,2],[7,1],[7,4],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,134],[8,1],[8,1],[8,5],[8,1],[8,6],[9,3],[9,15],[10,4],[10,3],[10,1],[11,12],[11,2],[12,2],[12,2],[14,1],[14,6],[15,3],[30,2],[35,1],[28,1],[111,1],[22,1],[25,1],[18,1],[40,4],[58,1],[295,4],[18,3],[35,1],[16,1],[1,1],[1,1],[1,2],[1,1],[1,6],[1,6],[1,2],[1,1],[1,301],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,1],[1,5],[1,1],[1,2],[1,1],[1,2],[1,2],[1,1],[1,1],[1,1],[1,3],[1,5],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,3],[1,2],[1,1],[1,7],[1,1],[1,2],[1,1],[1,2],[1,1],[1,2],[1,5],[1,1],[1,2],[1,1],[1,3],[1,1],[1,1],[1,17],[1,1],[1,1],[1,2],[1,2],[1,4],[1,3],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,4],[1,1],[1,1],[1,1],[1,1],[1,3],[1,3],[1,2],[1,1],[1,23],[1,1],[1,1],[1,1],[1,1],[1,3],[1,4],[1,1],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,1],[1,2],[1,1],[1,1],[1,4],[1,4],[1,1],[1,2],[1,1],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[1,1],[1,4],[1,1],[1,2],[1,1],[1,1],[1,1],[1,1],[1,2],[1,3],[1,4],[1,1],[1,1],[1,1],[1,2],[1,1],[1,3],[1,2],[1,2],[1,1],[1,1],[1,3],[1,15],[1,4],[1,1],[1,1],[1,3],[1,3],[1,1],[1,2],[1,2],[1,6],[1,1],[1,2],[1,1],[1,2],[1,2],[1,2],[1,1],[1,1],[1,3],[1,1],[1,1],[1,1],[2,2],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,1],[2,10],[2,3],[2,1],[2,1],[2,2],[2,7],[2,1],[2,1],[2,4],[2,1],[2,2],[2,1],[2,2],[2,2],[2,1],[2,1],[2,3],[2,6],[2,1],[2,1],[2,46],[2,1],[2,3],[2,1],[2,4],[2,1],[2,1],[2,1],[2,1],[2,2],[2,4],[2,4],[2,3],[3,11],[3,1],[3,1],[3,1],[3,1],[3,2],[3,1],[3,2],[3,4],[3,1],[3,1],[3,1],[3,3],[3,2],[3,1],[3,2],[3,2],[3,2],[3,1],[3,3],[3,1],[3,2],[3,2],[3,4],[3,1],[3,45],[3,2],[4,11],[4,2],[4,1],[4,2],[4,4],[4,14],[4,4],[4,2],[4,2],[4,1],[5,3],[5,1],[5,1],[5,2],[5,1],[5,2],[5,3],[5,2],[5,1],[5,2],[5,2],[6,1],[6,1],[6,3],[6,2],[6,1],[6,3],[6,1],[6,6],[7,1],[7,2],[7,1],[8,1],[8,2],[8,1],[8,1],[8,1],[8,2],[8,2],[8,2],[9,5],[9,2],[10,1],[10,1],[10,3],[11,8],[11,1],[12,5],[12,1],[14,1]])\n \n ida.scatter_plot(data, '{0}/faithful_ida_scatter.png'.format(output_dir))\n ida.histogram(data, '{0}/faithful_ida_hist.png'.format(output_dir))\n ida.linear_regression(data, '{0}/faithful_ida_regression.png'.format(output_dir))\n\n #clustering\n km2 = __run_clustering(data, output_dir)\n\n #expectation-maximization\n __run_em(data, output_dir, km2)\n\n #build bayes fmm model\n __run_bayesfmm(data, iterations, 
save_diagnostics, output_dir, burnin, km2)", "def export_image(self, params: Dict[str, str]) -> bytes:\n response = requests.post(self.export_url, data=params)\n self.export_output = response.content\n return self.export_output", "def write_images(deployment_key, image_data):\n\n for image_data_dict in image_data:\n\n print \"------------------>>> \" + image_data_dict['longitude']+\" \"+image_data_dict['latitude']\n\n #save the image\n image = Image(deployment_id=deployment_key,\n image_name=image_data_dict['image_name'],\n date_time=image_data_dict['date_time'],\n position=\"SRID=4326;POINT(\"+image_data_dict['longitude']+\" \"+image_data_dict['latitude']+\")\",\n #depth=image_data_dict['depth'],\n #depth_uncertainty=image_data_dict['depth_uncertainty'],\n )\n image.save()\n\n write_measurement(image, 'depth', 'm', image_data_dict['depth'])\n write_measurement(image, 'depth_uncertainty', 'm', image_data_dict['depth_uncertainty'])\n write_measurement(image, 'temperature', 'cel', image_data_dict['temperature'])\n write_measurement(image, 'salinity', 'psu', image_data_dict['salinity'])\n write_measurement(image, 'pitch', 'rad', image_data_dict['pitch'])\n write_measurement(image, 'roll', 'rad', image_data_dict['roll'])\n write_measurement(image, 'yaw', 'rad', image_data_dict['yaw'])\n write_measurement(image, 'altitude', 'm', image_data_dict['altitude'])\n\n #link the camera to the image\n camera_data_dict = read_camera_data(image_data_dict)\n camera = Camera(**camera_data_dict)\n camera.image = image\n camera.save()\n\n return None", "def dump(self, filename=\".azimint.json\"):\n logger.info(\"Dump!\")\n to_save = { \"poni\": str_(self.poni.text()).strip(),\n \"detector\": str_(self.detector.currentText()).lower(),\n \"wavelength\":float_(self.wavelength.text()),\n \"splineFile\":str_(self.splineFile.text()).strip(),\n \"pixel1\": float_(self.pixel1.text()),\n \"pixel2\":float_(self.pixel2.text()),\n \"dist\":float_(self.dist.text()),\n \"poni1\":float_(self.poni1.text()),\n \"poni2\":float_(self.poni2.text()),\n \"rot1\":float_(self.rot1.text()),\n \"rot2\":float_(self.rot2.text()),\n \"rot3\":float_(self.rot3.text()),\n \"do_dummy\": bool(self.do_dummy.isChecked()),\n \"do_mask\": bool(self.do_mask.isChecked()),\n \"do_dark\": bool(self.do_dark.isChecked()),\n \"do_flat\": bool(self.do_flat.isChecked()),\n \"do_polarization\":bool(self.do_polarization.isChecked()),\n \"val_dummy\":float_(self.val_dummy.text()),\n \"delta_dummy\":float_(self.delta_dummy.text()),\n \"mask_file\":str_(self.mask_file.text()).strip(),\n \"dark_current\":str_(self.dark_current.text()).strip(),\n \"flat_field\":str_(self.flat_field.text()).strip(),\n \"polarization_factor\":float_(self.polarization_factor.value()),\n \"nbpt_rad\":int_(self.nbpt_rad.text()),\n \"do_2D\":bool(self.do_2D.isChecked()),\n \"nbpt_azim\":int_(self.nbpt_azim.text()),\n \"chi_discontinuity_at_0\": bool(self.chi_discontinuity_at_0.isChecked()),\n \"do_solid_angle\": bool(self.do_solid_angle.isChecked()),\n \"do_radial_range\": bool(self.do_radial_range.isChecked()),\n \"do_azimuthal_range\": bool(self.do_azimuthal_range.isChecked()),\n \"do_poisson\": bool(self.do_poisson.isChecked()),\n \"radial_range_min\":float_(self.radial_range_min.text()),\n \"radial_range_max\":float_(self.radial_range_max.text()),\n \"azimuth_range_min\":float_(self.azimuth_range_min.text()),\n \"azimuth_range_max\":float_(self.azimuth_range_max.text()),\n }\n for unit, widget in self.units.items():\n if widget is not None and widget.isChecked():\n 
to_save[\"unit\"] = unit.REPR\n break\n else:\n logger.warning(\"Undefined unit !!!\")\n try:\n with open(filename, \"w\") as myFile:\n json.dump(to_save, myFile, indent=4)\n except IOError as error:\n logger.error(\"Error while saving config: %s\" % error)\n else:\n logger.debug(\"Saved\")\n return to_save", "def fig_response(fig):\n img_bytes = io.BytesIO()\n fig.savefig(img_bytes)\n img_bytes.seek(0)\n return send_file(img_bytes, mimetype='image/png')", "def test_RawImage_write_out():\n i.write_out()\n # now compare the output with reference\n print i.outpath\n print t.processed_path\n assert_image_equal(i.outpath, t.processed_path)", "def on_get(self, req, resp):\n resp.set_header('Content-Type', 'text/json')\n tif_paths = encode.get_files_in_directory(DIARIES_TO_ENCODE_DIR, \".tif\")\n zip_paths = encode.get_files_in_directory(DIARIES_TO_ENCODE_DIR, \".zip\")\n diaries_paths = tif_paths + zip_paths\n def extract_file_name(path): return os.path.basename(path)\n resp.body = json.dumps({\"diaries\": list(map(extract_file_name, diaries_paths)),\n \"diaries_paths\": diaries_paths})", "def outputs(self):\n return {\"path_to_mapping_json\": File_IO(\n self.node.outputs[0])}", "def addImg(in_dict):\n img = Image(name=in_dict[\"name\"],\n b64str=in_dict[\"b64str\"],\n imgsize=in_dict[\"imgsize\"],\n processed=in_dict[\"processed\"],\n timestamp=in_dict[\"timestamp\"])\n ans = img.save()\n return ans.name", "def save_debug_predict_image(self, scene, debug_dir_uri):\n pass", "def get_imgs_from_json(self):\n # instantiate COCO specifying the annotations json path\n # Specify a list of category names of interest\n catIds = self.coco.getCatIds(catNms=[self.categ])\n print(\"catIds: \", catIds)\n # Get the corresponding image ids and images using loadImgs\n imgIds = self.coco.getImgIds(catIds=catIds)\n images = self.coco.loadImgs(imgIds)\n print(f\"{len(images)} images in '{self.json_path}' with '{self.categ}' instances\")\n self.catIds = catIds # list\n return images", "def output_handler(response, context):\n print(\"Output handler\")\n \n if response.status_code != 200:\n _return_error(response.status_code, response.content.decode('utf-8'))\n response_content_type = context.accept_header\n content = response.content\n\n predictions = json.loads(content.decode('UTF-8'))\n predictions = np.array(predictions[\"predictions\"])\n res = []\n for pred in predictions:\n top3 = (-pred).argsort()[:3]\n res.append({'file_name': 'no-filename', 'path': 'no-path', 'cls': 'actual', 'prediction':top3[0], 'proba_1':pred[top3[0]], 'prediction2':top3[1], 'proba_2':pred[top3[1]], 'prediction3':top3[2], 'proba_3':pred[top3[2]]})\n\n image_index = pd.DataFrame(res)\n image_index['prediction'] = image_index.prediction.map(cls_map)\n image_index['prediction2'] = image_index.prediction2.map(cls_map)\n image_index['prediction3'] = image_index.prediction3.map(cls_map)\n return image_index.to_csv(index=False, header=False), response_content_type", "def get_pic() -> str:\n with open(os.path.dirname(os.path.abspath(__file__))+'\\\\data.json', 'r') as test:\n test = json.load(test)\n pic = test['button_pic']\n return pic", "def process():\n config = read_config()\n \n\n img_dir = config['DEFAULT']['images_directory']\n results_dict = {}\n images = list(get_image_files(img_dir))\n for image in tqdm.tqdm(images):\n info = hash_file(image)\n if info == 0:\n continue\n\n hash_value = info['hash']\n\n if hash_value not in results_dict:\n file_name = os.path.basename(info['_id'])\n results_dict[hash_value] = [file_name, 1]\n else:\n 
results_dict[hash_value][1] += 1\n\n count = list(results_dict.values())\n sorted_count = sorted(count, key=lambda x: x[1], reverse=True)\n \n with ImagesDB(IMG_INFO_DB_FILENAME) as imgDb: \n imgDb.insert_batch(sorted_count)", "def global_metadata(paths):\n\n # Weakly group images to partition image set size- crucial optimization step\n if os.path.exists(paths.image_preprocess):\n clumped_paths = json.loads(open(paths.image_preprocess).read())\n else:\n clumped_paths = network.alpha_categorize(paths)\n print(\"Hashed source images\")\n\n with open(paths.image_preprocess, 'w') as json_file:\n json.dump(clumped_paths, json_file)\n\n # Combinatorial image grouping to graph\n image_graph = network.load_graph(paths.image_network_path)\n\n total = len(list(chain(*clumped_paths.values())))\n counter = 0.\n\n for image_paths in clumped_paths.values():\n counter += len(image_paths)\n print(str(int(counter / float(total) * 100)) + \"% complete\")\n\n if len(image_paths) > 1:\n image_grouping = images.load_paths(paths.default_patches, image_paths)\n image_graph = metadata.network.network_images(\n image_grouping, threshold=0, network=image_graph)\n else:\n image_graph.add_node(image_paths[0])\n\n metadata.network.save_graph(paths.image_network_path, image_graph)\n print(\"Updated image graph.\")\n\n # Create informational json files for templates and files\n templates.build(paths, image_graph)\n mappings.build(paths, image_graph)\n print(\"Created JSON metadata files.\")", "def get_raw_data(input_path, save_gt=False):\n # define variable for returning\n all_txts = [] # a list, each element is a dictionary\n coords = [] # a list, storing a image's all text region's coordinates which is clockwise\n num_txt = 0\n visual = False\n print('Parsing txt files')\n # txt_directory = os.path.join(input_path, 'text')\n # all_txt_files = [os.path.join(txt_directory, s) for s in os.listdir(txt_directory)]\n txtfiles = input_path + '/*.txt'\n all_txt_files = glob.glob(txtfiles)\n box_num = 0\n for txt in all_txt_files:\n with open(txt, 'r') as f:\n num_txt += 1\n for line in f:\n box_num += 1\n line_split = line.strip().split(',')\n # clockwise\n (x1, y1, x2, y2) = line_split[0:4]\n (x3, y3, x4, y4) = line_split[4:8]\n coords.append((x1, y1, x2, y2, x3, y3, x4, y4))\n txtfilepath = txt\n # using regular expression, get image file path\n # pattern = re.compile('text')\n # img_file_path = pattern.sub('image', txt)\n pattern = re.compile('txt')\n img_file_path = pattern.sub('jpg', txtfilepath)\n txt_data = {'imagePath': img_file_path, 'boxCoord': coords, 'boxNum': box_num}\n box_num = 0\n coords = []\n # image file wheater corresponding to text file, and image file is not empty then add\n if os.path.isfile(img_file_path) and os.path.isfile(txtfilepath) \\\n and os.path.getsize(img_file_path):\n all_txts.append(txt_data)\n # -----------------------visualizing-----------------------------------------\n # draw text region on image and save image\n # print text region on image for comparing gt and predicted results\n if os.path.isfile(img_file_path) and os.path.isfile(txtfilepath) \\\n and os.path.getsize(img_file_path) and save_gt:\n save_groudtruth(cv2.imread(img_file_path), txt_data['boxCoord'], img_file_path)\n\n # draw text region on image and show image\n if os.path.isfile(img_file_path) and os.path.isfile(txtfilepath) \\\n and os.path.getsize(img_file_path) and visual:\n visualize(cv2.imread(img_file_path), txt_data['boxCoord'], img_file_path)\n # 
-----------------------visualizing-----------------------------------------\n return all_txts, num_txt", "def ref_resp2files(output_file, output_json):\n with open(output_file, \"w\") as reference_text:\n reference_text.write(output_json)", "def vl2img(vl_json_in, fileformat):\n\n # TODO would prefer to do this properly with pipes\n # using | and shell=True is safe though given no arguments\n executables = {\"svg\": \"vg2svg\", \"png\": \"vg2png\", \"pdf\": \"vg2pdf\"}\n try:\n exe = executables[fileformat]\n except KeyError as e:\n print(e.output)\n try:\n return subprocess.check_output(\"vl2vg | %s\" % exe, shell=True, input=vl_json_in)\n except subprocess.CalledProcessError as e:\n print(e.output)", "def _writeDataToFile(self,idata,resultkeys,path,imgname, filename):\n\t\tfilepathname = path+'/'+filename\n\t\tif os.path.exists(filepathname):\n\t\t\tos.remove(filepathname)\n\t\tif idata is None:\n\t\t\treturn\n\t\tresultfile=open(filepathname,'w')\n\t\tresultlines=[]\n\t\tfor info in idata:\n\t\t\tresultline = ''\n\t\t\tfor infokey in resultkeys:\n\t\t\t\ttry:\n\t\t\t\t\t# For data object, save in file as its dbid\n\t\t\t\t\tresult = info[infokey].dbid\n\t\t\t\texcept:\n\t\t\t\t\tresult = info[infokey]\n\n\t\t\t\t# For image, save in file as its filename\n\t\t\t\tif infokey == 'image':\n\t\t\t\t\tresult=imgname\n\n\t\t\t\t# Separate the results by tabs\n\t\t\t\ttry:\n\t\t\t\t\tresultline += str(result) + '\\t'\n\t\t\t\texcept:\n\t\t\t\t\tresultline += '\\t'\n\t\t\tresultlines.append(resultline)\n\t\tresultlinestxt = '\\n'.join(resultlines) +\"\\n\"\n\t\tresultfile.write(resultlinestxt)\n\t\tresultfile.close()", "def model_processing(img):\n\n # assert isinstance(img, EmotionalImage)\n\n if str(img.name).find('json') > -1:\n return\n user = get_user(img.path + '/' + 'meta.json')\n filePath = img.path + '/' + img.name\n # print(\"---------------Processsing----------------\", img.name)\n\n features = extract_features(filePath)\n emotions = predict_emotions(features)\n uuid1 = uuid.uuid4()\n emImage = EmotionalImage(\n uuid1, img.name, img.path, features, emotions, \"\", \"\", \"\")\n user.images.append(emImage)\n # user.save()", "def process_files_json():\n # chdir into beep root\n pwd = os.getcwd()\n os.chdir(os.environ.get(\"BEEP_ROOT\", \"/\"))\n\n meta_list = list(filter(lambda x: '_Metadata.csv' in x, os.listdir(SRC_DIR)))\n file_list = list(filter(lambda x: '.csv' in x if x not in meta_list else None, os.listdir(SRC_DIR)))\n all_list = list(filter(lambda x: '.csv' in x, os.listdir(SRC_DIR)))\n\n all_list = sorted(all_list)\n dumpfn(all_list, \"all_files.json\")\n\n [file_id, mapdf] = init_map(PROJECT_NAME, DEST_DIR)\n\n new_file_index = file_id\n\n for filename in tqdm(sorted(file_list)):\n # If the file has already been renamed another entry should not be made\n if mapdf['filename'].str.contains(filename).sum() > 0:\n continue\n old_file = os.path.join(SRC_DIR, filename)\n new_path = os.path.join(DEST_DIR, PROJECT_NAME)\n shutil.copy(old_file, new_path) # copy main data file\n shutil.copy(old_file.replace(\".csv\", '_Metadata.csv'), new_path) # copy meta data file\n\n if PROJECT_NAME == 'FastCharge':\n [date, channel_no, strname, protocol] = get_parameters_fastcharge(filename, SRC_DIR)\n elif PROJECT_NAME == 'ClosedLoopOED':\n [date, channel_no, strname, protocol] = get_parameters_oed(filename, SRC_DIR)\n else:\n raise ValueError(\"Unsupported PROJECT_NAME: {}\".format(PROJECT_NAME))\n\n df_dup = mapdf.set_index(['protocol', 'date'])\n if (protocol, date) in df_dup.index:\n row = 
mapdf[(mapdf['protocol'] == protocol) & (mapdf['date'] == date)]\n file_id = row['fid'].iloc[0]\n protocol = row['protocol'].iloc[0]\n date = row['date'].iloc[0]\n strname = row['strname'].iloc[0]\n else:\n file_id = new_file_index\n new_file_index = new_file_index + 1\n\n new_name = \"{}_{}_{}\".format(PROJECT_NAME, f'{file_id:06}', channel_no)\n new_file = os.path.join(DEST_DIR, PROJECT_NAME, \"{}.csv\".format(new_name))\n\n new_row = pd.DataFrame([[file_id, protocol, channel_no, date, strname,\n os.path.abspath(old_file),\n os.path.abspath(new_file)]],\n columns=METADATA_COLUMN_NAMES)\n mapdf = mapdf.append(new_row)\n\n os.rename(os.path.join(DEST_DIR, PROJECT_NAME, filename), new_file)\n os.rename(os.path.join(DEST_DIR, PROJECT_NAME, filename).replace(\".csv\", \"_Metadata.csv\"),\n new_file.replace(\".csv\", \"_Metadata.csv\"))\n\n mapdf.to_csv(os.path.join(DEST_DIR, PROJECT_NAME, PROJECT_NAME + \"map.csv\"), index=False)\n mapdf = mapdf.reset_index(drop=True)\n os.chdir(pwd)\n return json.dumps(mapdf.to_dict(\"list\"))", "def save(self):\n from settings import PROCESSORS\n from .filesystem import makedirs\n\n if self.im is None:\n # If we got here something very strange is going on that I can't even\n # predict.\n return # pragma: no cover\n makedirs(self.output_path)\n for action, arg in self.actions:\n action = PROCESSORS[action]\n if self.frames:\n new_frames = []\n for frame in self.frames:\n new_frames.append(action.process(frame, arg))\n self.frames = new_frames\n else:\n self.im = action.process(self.im, arg)\n\n self.im = optimize.optimize(self.im, fmt=self.format, quality=self.quality)\n\n kwargs = {\n 'format': self.format,\n 'optimize': True,\n 'quality': self.quality,\n }\n if self.format == 'jpeg':\n kwargs['progressive'] = True\n\n if self.filename.startswith('s3://'):\n import cStringIO\n from filesystem import s3\n output = cStringIO.StringIO()\n if self.frames:\n images2gif.write_gif(output, self.frames)\n else:\n self.im.save(output, **kwargs)\n output.reset()\n s3.put_file(output, self.filename)\n else:\n if self.frames:\n images2gif.write_gif(self.filename, self.frames)\n else:\n self.im.save(self.filename, **kwargs)", "def __call__(self, results):\n # Image is bgr\n img = results['img'][..., ::-1]\n img = Image.fromarray(img)\n img = self.transform(img)\n img = np.asarray(img)\n img = img[..., ::-1]\n results['img'] = img\n return results", "def serialize_example(image_inp_string,image_out_string):\n image_inp_shape = tf.image.decode_jpeg(image_inp_string).shape\n image_out_shape = tf.image.decode_jpeg(image_out_string).shape\n feature = {\n\n 'image_input': _bytes_feature(image_inp_string),\n 'image_output':_bytes_feature(image_out_string),\n }\n\n example_proto = tf.train.Example(features=tf.train.Features(feature=feature))\n return example_proto.SerializeToString()\n\n\n #--------------------------------------------------------------------------------------\n\n ###process image", "def _process_image_local(raw_image_path):\n return process_image(\n original_rgb_image=raw.open.as_rgb(raw_image_path),\n original_image_filepath=raw_image_path,\n raw_images_dir=raw_images_dir,\n ROI_definitions=ROI_definitions,\n flat_field_filepath_or_none=flat_field_filepath_or_none,\n save_ROIs=save_ROIs,\n save_dark_frame_corrected_image=save_dark_frame_corrected_images,\n save_flat_field_corrected_image=save_flat_field_corrected_images,\n )", "def csv_images_to_api(self):\n for data in self._api_data:\n if \"images\" in data:\n data[\"images\"] = 
self.map_csv_images(data[\"images\"])", "def write_results(input_dict):\n out_imgs = {}\n for mol in input_dict:\n mols = []\n for new_mols in input_dict[mol]:\n m = Chem.MolFromSmiles(new_mols)\n mols.append(m)\n if len(mols) > 2:\n p = Chem.MolFromSmarts(MCS.FindMCS(mols).smarts)\n AllChem.Compute2DCoords(p)\n for m in mols:\n AllChem.GenerateDepictionMatching2DStructure(m, p)\n out_imgs[mol] = Draw.MolsToGridImage(mols, useSVG=True)\n # Write out the image\n return out_imgs", "def save_image(self):\n self.table_to_image.img.save(self.file_name)\n aws.AWSHandler().upload_image(self.file_name)", "def save_step_1(imgs, output_path='./output/step1'):\n # ... your code here ...\n i=0\n for each in imgs:\n i+=1\n cv2.imwrite(output_path+\"/output\"+str(i)+\".jpg\", each)", "def save(self, output, data):", "def test_plot_images(self):\n save_file(self.quart.plot_images)", "def main_recognition():\n if request.method == 'POST':\n # print(request.url)\n # stream = BytesIO(request.data)\n # image = Image.open(stream).convert(\"RGBA\")\n # path = 'C:/Users/13/Documents/FRS_v1/path.png'\n # image = image.save(path)\n # stream.close()\n #df = faces_info_export(path)\n print(request.url)\n stream = BytesIO(request.data)\n img_pil=Image.open(stream).convert(\"RGB\")\n stream.close()\n img_cv=np.array(img_pil)\n try:\n df = faces_info_export(img_cv)\n return df.to_json(orient='index')\n except SystemError as er:\n \tprint(er)\n \treturn json.dumps({'msg':'error'})\n except AttributeError as er:\n \tprint(er)\n \treturn json.dumps({'msg':'error'})\n if request.method == 'GET':\n # ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg'}\n df = faces_info_export(\"C:/Users/13/Documents/FRS_v1/test_image.jpg\")\n return df.to_json(orient='index')", "def export_results(mask, out):\n ret, markers = cv.connectedComponents(mask.astype(np.uint8))\n # should we save the segments as a mask or as bounding boxes?\n if out.endswith('.npy'):\n np.save(out, markers)\n elif out.endswith('.json'):\n # import extra required modules\n from imantics import Mask\n import import_labelme\n segments = [\n (int(i), largest_polygon(Mask(markers == i).polygons()).tolist())\n for i in range(1, ret)\n ]\n import_labelme.write(out, segments, args.image)\n else:\n raise Exception(\"Unsupported output file format.\")", "def add_image(self, f_name,file,new_id):\r\n folder=tempfile.mktemp()\r\n os.mkdir(folder)\r\n datei=open(folder+'/'+f_name,'w+')\r\n datei.write(file.read())\r\n datei.close()\r\n val='' \r\n liste_ext=liste_val\r\n if(self.toolbox.hasProperty('eigene_formate')):\r\n self_val=self.toolbox.getProperty('eigene_formate').split(',')\r\n liste_ext=[]\r\n for x in self_val:\r\n liste_ext.append('_'+x+'.jpeg')\r\n for extension in liste_ext:\r\n #cmd='/usr/bin/convert '+folder+'/'+f_name+' -resize '+extension[1:-4]+'x'+extension[1:-4]+' '+folder+'/'+new_id+extension\r\n cmd='/usr/bin/convert '+folder+'/'+f_name+' -resize '+extension[1:-4]+' '+folder+'/'+new_id+extension\r\n order=os.popen(cmd).read()\r\n kurz_name='_'+str(f_name.split('.')[0])\r\n kurz_name=kurz_name.replace(' ','_')\r\n val=val+self.manage_addImage(id=new_id+kurz_name+extension,file=open(folder+'/'+new_id+extension),title=f_name, precondition='', content_type='',REQUEST=None)+' ' \r\n os.remove(folder+'/'+new_id+extension)\r\n os.remove(folder+'/'+f_name)\r\n os.rmdir(folder)\r\n txt=\"Datei Hochgeladen!<br>\"\r\n #my_root=self.toolbox\r\n #txt+=my_root.id+\"<br>\"\r\n #if(my_root.hasProperty('eigene_formate')):\r\n # 
txt+=my_root.getProperty('eigene_formate')+\"<br>\"\r\n return txt" ]
[ "0.64531106", "0.64168984", "0.6302975", "0.6163437", "0.6081732", "0.6022132", "0.5954592", "0.59521526", "0.5950262", "0.59386396", "0.5938475", "0.58703864", "0.58624244", "0.5849132", "0.5811577", "0.5798862", "0.579062", "0.5771886", "0.57512766", "0.5739413", "0.5733601", "0.5725228", "0.56866515", "0.56788784", "0.5669759", "0.5667863", "0.56432307", "0.5630034", "0.56109154", "0.56055444", "0.5567559", "0.55628353", "0.5546093", "0.55392206", "0.5527114", "0.5523172", "0.5519615", "0.551684", "0.5499845", "0.5499241", "0.54897267", "0.548369", "0.5476209", "0.5469306", "0.54477227", "0.54468316", "0.54442936", "0.54374194", "0.5435353", "0.5432535", "0.54224175", "0.5419641", "0.54116404", "0.5380863", "0.53806895", "0.5376414", "0.53670233", "0.5360021", "0.5357354", "0.53461415", "0.53368664", "0.5325583", "0.5321873", "0.53151876", "0.5315144", "0.5312918", "0.5311745", "0.5311046", "0.5310674", "0.5309729", "0.5309585", "0.5306953", "0.53058094", "0.5305507", "0.53028506", "0.53022337", "0.5301217", "0.52993065", "0.5282629", "0.5280166", "0.52784747", "0.52661717", "0.5265291", "0.5264238", "0.52639735", "0.52585644", "0.52555126", "0.5248872", "0.52417773", "0.5240399", "0.52399224", "0.5226412", "0.52259076", "0.52241", "0.5219531", "0.52111346", "0.52110255", "0.52060604", "0.5202141", "0.5199396", "0.51935863" ]
0.0
-1
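The record that closes here ends with a quoted config-saving routine: populate a `to_save` dict, `json.dump` it with `indent=4`, log an `IOError` on failure, and return the dict. Below is a minimal runnable sketch of that same pattern; the wrapper name `save_config` and the module-level logger setup are assumptions added only to make the fragment self-contained.

```python
import json
import logging

logger = logging.getLogger(__name__)

def save_config(to_save, filename):
    """Serialize a config dict to JSON, logging the outcome (sketch)."""
    try:
        with open(filename, "w") as my_file:
            json.dump(to_save, my_file, indent=4)
    except IOError as error:
        # Mirrors the quoted record: report the failure but do not raise.
        logger.error("Error while saving config: %s" % error)
    else:
        logger.debug("Saved")
    return to_save
```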
Converts either bytes or unicode to `bytes`, using utf8 encoding for text.
def as_bytes(bytes_or_text, encoding='utf-8'): if isinstance(bytes_or_text, _six.text_type): return bytes_or_text.encode(encoding) elif isinstance(bytes_or_text, bytes): return bytes_or_text else: raise TypeError('Expected binary or unicode string, got %r' % (bytes_or_text,))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _to_bytes(value: Union[str, bytes]) -> bytes:\n return value if isinstance(value, bytes) else value.encode(\"utf-8\")", "def ensure_utf8_bytes(v: Union[str, bytes]) -> bytes:\n if isinstance(v, str):\n v = v.encode(\"utf-8\")\n return v", "def force_utf8(text):\n if isinstance(text, binary_type):\n return text\n else:\n return text.encode('utf-8')", "def decode_to_utf8(text) -> bytes: # pragma: no cover\n try:\n return text.decode(\"utf-8\")\n except (AttributeError, UnicodeEncodeError):\n return text", "def _as_bytes(s):\n if isinstance(s, bytes):\n return s\n return bytes(s, encoding='latin_1')", "def to_utf8(text, errors='strict', encoding='utf8'):\n if isinstance(text, unicode):\n return text.encode('utf8')\n # do bytestring -> unicode -> utf8 full circle, to ensure valid utf8\n else:\n return unicode(text, encoding, errors=errors).encode('utf8')", "def asbytes(s):\n if isinstance(s, bytes):\n return s\n else:\n return s.encode('utf-8')", "def str_to_bytes(data):\n u_type = type(b''.decode('utf8'))\n if isinstance(data, u_type):\n return data.encode('utf8')\n return data", "def to_bytes(data):\n if isinstance(data, unicode):\n return data.encode('utf-8')\n else:\n return data", "def utf8(value):\n if isinstance(value, (bytes, type(None))):\n return value\n if not isinstance(value, unicode_type):\n raise TypeError(\n \"Expected bytes, unicode, or None; got %r\" % type(value)\n )\n return value.encode(\"utf-8\")", "def ensure_bytes(data, encoding=\"utf8\"):\n return data if isinstance(data, bytes) else unicode_type(data).encode(encoding)", "def to_bytes(bytes_or_str):\r\n if isinstance(bytes_or_str, str):\r\n value = bytes_or_str.encode('utf-8')\r\n else:\r\n value = bytes_or_str\r\n return value", "def to_bytes(data):\n if isinstance(data, str):\n return data.encode(encoding='utf-8')\n else:\n return data", "def to_bytes(something, encoding='utf8') -> bytes:\n if isinstance(something, bytes):\n return something\n if isinstance(something, str):\n return something.encode(encoding)\n elif isinstance(something, bytearray):\n return bytes(something)\n else:\n raise TypeError(\"Not a string or bytes like object\")", "def str_to_bytes(self, data):\n if isinstance(data, bytes):\n return data\n return data.encode(\"utf-8\")", "def to_utf8(text, charset='iso-8859-15'):\n try:\n # Do nothing if it's already utf-8\n u = unicode(text, 'utf-8')\n return text\n except UnicodeError:\n try:\n # Use the user supplied charset if possible\n u = unicode(text, charset)\n except UnicodeError:\n # This should always work\n u = unicode(text, 'iso-8859-15')\n return u.encode('utf-8')\n except TypeError:\n return text", "def as_utf8(value):\n assert value is None or isinstance(value,types.StringTypes)\n if isinstance(value,types.UnicodeType):\n return value.encode('utf-8')\n else:\n return value", "def give_me_bytes(string):\n return string.encode('utf8') if isinstance(string, str) else string", "def as_text(bytes_or_text, encoding='utf-8'):\n if isinstance(bytes_or_text, _six.text_type):\n return bytes_or_text\n elif isinstance(bytes_or_text, bytes):\n return bytes_or_text.decode(encoding)\n else:\n raise TypeError(\n 'Expected binary or unicode string, got %r' % bytes_or_text\n )", "def utf8(value):\r\n if isinstance(value, _UTF8_TYPES):\r\n return value\r\n elif isinstance(value, unicode_type):\r\n return value.encode(\"utf-8\")\r\n else:\r\n return str(value)", "def utf8(value):\r\n if isinstance(value, six.text_type):\r\n return value.encode('utf-8')\r\n assert isinstance(value, str)\r\n return 
value", "def to_bytes(string):\n assert isinstance(string, basestring)\n if sys.version_info[0] >= 3:\n if isinstance(string, str):\n return string.encode('utf-8')\n else:\n return string\n else:\n if isinstance(string, unicode):\n return string.encode('utf-8')\n else:\n return string", "def bytes_string(text, encode=\"utf-8\"):\n if not PY3:\n if isinstance(text, unicode): # pylint: disable=undefined-variable\n result = text.encode(encode)\n else:\n result = text\n else:\n if isinstance(text, bytes):\n result = text\n else:\n result = bytes(text, encode)\n return result", "def ensure_bytes(value: AnyStr) -> bytes:\n if isinstance(value, bytes):\n return value\n if isinstance(value, str):\n return value.encode('utf-8')\n raise TypeError(f\"input must be str or bytes, got {type(value).__name__}\")", "def _as_bytes(s):\n if isinstance(s, bytes):\n return s\n # Assume it is a unicode string\n # Note ISO-8859-1 aka Latin-1 preserves first 256 chars\n return codecs.latin_1_encode(s)[0]", "def test_bytes_encoding_arg(self):\n u = u'Unicode string: \\u5b54\\u5b50'\n b = py23_bytes(u, encoding='utf-8')\n self.assertEqual(b, u.encode('utf-8'))", "def ensure_bytes(str_or_bytes, binary_type=six.binary_type,\n text_type=six.text_type):\n if isinstance(str_or_bytes, binary_type):\n return str_or_bytes\n if isinstance(str_or_bytes, text_type):\n return str_or_bytes.encode('utf-8')\n raise TypeError(\n \"input must be a text or byte string, got {}\"\n .format(type(str_or_bytes).__name__))", "def toBytes(data):\n\tif isBytes(data):\n\t\treturn data\n\telse:\n\t\treturn data.encode(\"latin-1\")", "def force_bytes(value):\n if IS_PY3:\n if isinstance(value, str):\n value = value.encode(\"utf-8\", \"backslashreplace\")\n else:\n if isinstance(value, unicode): # NOQA: F821\n value = value.encode(\"utf-8\")\n\n return value", "def ensure_bytes(s, encoding):\n if isinstance(s, bytes):\n return s\n return s.encode(encoding)", "def test_bytes_encoding_arg_non_kwarg(self):\n u = u'Unicode string: \\u5b54\\u5b50'\n b = py23_bytes(u, 'utf-8')\n self.assertEqual(b, u.encode('utf-8'))", "def to_utf8(self, _string):\n if isinstance(_string, unicode):\n _string = _string.encode(\"UTF-8\")\n return _string", "def tob(data, enc='utf8'):\r\n return data.encode(enc) if isinstance(data, unicode) else bytes(data)", "def to_bytes(value: str) -> bytes:\n if value is not None:\n out = value.encode(\"utf-8\")\n else:\n out = b\"\"\n\n return out", "def unicode2utf8(arg):\n\n try:\n if isinstance(arg, unicode):\n return arg.encode('utf-8')\n except NameError:\n pass # Python 3\n return arg", "def cast_unicode(s, encoding='utf-8'):\n if isinstance(s, bytes) and not PY3:\n return s.decode(encoding, \"replace\")\n return s", "def convert_to_unicode(text):\n if six.PY3:\n if isinstance(text, str):\n return text\n elif isinstance(text, bytes):\n return text.decode(\"utf-8\", \"ignore\")\n else:\n raise ValueError(\n \"Unsupported string type: %s\" % (type(text))\n ) # pragma: no cover", "def to_unicode(text, encoding='utf-8', errors='strict'):\r\n if isinstance(text, bytes):\r\n return text.decode(encoding, errors=errors)\r\n return text", "def unicode2utf8(s):\n return s.encode(encoding='utf-8', errors='ignore')", "def test_unicode2bytes():\n if backwards.PY2: # pragma: Python 2\n res = backwards.bytes_type('hello')\n backwards.assert_bytes(res)\n nt.assert_equal(backwards.unicode2bytes('hello'), res)\n nt.assert_equal(backwards.unicode2bytes(unicode('hello')), res)\n nt.assert_equal(backwards.unicode2bytes(bytearray('hello', 
'utf-8')), res)\n nt.assert_raises(TypeError, backwards.unicode2bytes, 1)\n else: # pragma: Python 3\n res = backwards.bytes_type('hello', 'utf-8')\n backwards.assert_bytes(res)\n nt.assert_equal(backwards.unicode2bytes('hello'), res)\n nt.assert_equal(backwards.unicode2bytes(b'hello'), res)\n nt.assert_equal(backwards.unicode2bytes(bytearray('hello', 'utf-8')), res)\n nt.assert_raises(TypeError, backwards.unicode2bytes, 1)", "def to_unicode(value):\r\n if isinstance(value, _TO_UNICODE_TYPES):\r\n return value\r\n if not isinstance(value, bytes_type):\r\n raise TypeError(\r\n \"Expected bytes, unicode, or None; got %r\" % type(value)\r\n )\r\n return value.decode(\"utf-8\")", "def str_to_bytes(s, encoding='ascii'):\n return s.encode(encoding)", "def convert_to_unicode(text):\n if six.PY3:\n if isinstance(text, str):\n return text\n elif isinstance(text, bytes):\n return text.decode(\"utf-8\", \"ignore\")\n else:\n raise ValueError(\"Unsupported string type: %s\" % (type(text)))\n elif six.PY2:\n if isinstance(text, str):\n return text.decode(\"utf-8\", \"ignore\")\n elif isinstance(text, unicode):\n return text\n else:\n raise ValueError(\"Unsupported string type: %s\" % (type(text)))\n else:\n raise ValueError(\"Not running on Python2 or Python 3?\")", "def convert_to_unicode(text):\n if six.PY3:\n if isinstance(text, str):\n return text\n elif isinstance(text, bytes):\n return text.decode(\"utf-8\", \"ignore\")\n else:\n raise ValueError(\"Unsupported string type: %s\" % (type(text)))\n elif six.PY2:\n if isinstance(text, str):\n return text.decode(\"utf-8\", \"ignore\")\n elif isinstance(text, unicode):\n return text\n else:\n raise ValueError(\"Unsupported string type: %s\" % (type(text)))\n else:\n raise ValueError(\"Not running on Python2 or Python 3?\")", "def _to_unicode(text):\n # both str and unicode inherit from basestring\n if not isinstance(text, basestring):\n tmpl = 'expected UTF-8 encoded string or unicode, got %s value %s'\n raise TypeError(tmpl % (type(text), text))\n # return unicode strings unchanged\n if isinstance(text, unicode):\n return text\n # otherwise assume UTF-8 encoding, which also works for ASCII\n return unicode(text, 'utf-8')", "def to_unicode(s, encoding=\"utf-8\"):\n if isinstance(s, six.text_type):\n return s\n elif isinstance(s, bytes):\n return s.decode(encoding)\n # TODO: warning? 
Exception?\n return s", "def ensure_unicode(data, encoding=\"utf8\"):\n if isinstance(data, bytes):\n return data.decode(encoding)\n else:\n return unicode_type(data)", "def polite_bytes(a_string):\n if is_py3():\n try:\n return bytes(a_string, 'utf-8')\n except TypeError:\n return a_string\n\n return a_string", "def convert_to_unicode(text):\n if six.PY3:\n if isinstance(text, str):\n return text\n elif isinstance(text, bytes):\n return text.decode(\"utf-8\", \"ignore\")\n else:\n raise ValueError(\"Unsupported string type: %s, %s\" % (type(text), text))\n elif six.PY2:\n if isinstance(text, str):\n return text.decode(\"utf-8\", \"ignore\")\n elif isinstance(text, unicode):\n return text\n else:\n raise ValueError(\"Unsupported string type: %s\" % (type(text)))\n else:\n raise ValueError(\"Not running on Python2 or Python 3?\")", "def _encode_to_utf8(s):\n return s.encode('utf-8')", "def encode_utf8(string):\n if isinstance(string, unicode):\n try: \n return string.encode(\"utf-8\")\n except:\n return string\n return str(string)", "def test_to_Bytes(self) -> None:\n self.assertEqual(to_bytes('Hello'),\n bytearray('Hello', 'utf-8'),\n \"Check that to_bytes creates byte array when presented with non byte string.\")", "def asunicode(s):\n if isinstance(s, bytes):\n return s.decode('utf-8', 'replace')\n else:\n return s", "def to_unicode(data):\n if isinstance(data, bytes):\n return data.decode('utf-8')\n else:\n return data", "def kafka_bytestring(s):\n if isinstance(s, bytes):\n return s\n if isinstance(s, bytes):\n return s.encode('utf-8')\n raise TypeError(s)", "def force_utf8(data):\n if isinstance(data, unicode):\n return data.encode(\"utf-8\")\n elif isinstance(data, list):\n return [force_utf8(i) for i in data]\n elif isinstance(data, dict):\n return {force_utf8(i): force_utf8(data[i]) for i in data}\n return data", "def utf8(unicode_str):\n if six.PY2 and isinstance(unicode_str, __unicode__):\n return unicode_str.encode('utf-8')\n\n return unicode_str", "def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes:\n if isinstance(value, str):\n try:\n return value.encode(\"ascii\")\n except UnicodeEncodeError:\n raise TypeError(f\"{name} strings may not include unicode characters.\")\n elif isinstance(value, bytes):\n return value\n\n seen_type = type(value).__name__\n raise TypeError(f\"{name} must be bytes or str, but got {seen_type}.\")", "def binary_encoding(string, encoding = 'utf-8'):\n try:\n return bytes(string, encoding)\n except TypeError: # We are in Python 2\n return str(string)", "def ToBytes(string):\n if sys.version_info[0] >= 3:\n return string.encode('utf-8')\n return string", "def to_unicode(text, encoding='utf8', errors='strict'):\n if isinstance(text, unicode):\n return text\n return unicode(text, encoding, errors=errors)", "def bytes_to_str(s, encoding='utf-8'):\n if six.PY3 and isinstance(s, bytes):\n return s.decode(encoding)\n return s", "def bytes_to_str(s, encoding='utf-8'):\n if six.PY3 and isinstance(s, bytes):\n return s.decode(encoding)\n return s", "def bytes_(s: Any, encoding: str = 'utf-8', errors: str = 'strict') -> Any:\n if isinstance(s, int):\n s = str(s)\n if isinstance(s, str):\n return s.encode(encoding, errors)\n return s", "def bytes_(s: Any, encoding: str = 'utf-8', errors: str = 'strict') -> Any:\n if isinstance(s, int):\n s = str(s)\n if isinstance(s, str):\n return s.encode(encoding, errors)\n return s", "def force_unicode(value):\n if IS_PY3:\n # Python 3.X\n if isinstance(value, bytes):\n value = value.decode(\"utf-8\", 
errors=\"replace\")\n elif not isinstance(value, str):\n value = str(value)\n else:\n # Python 2.X\n if isinstance(value, str):\n value = value.decode(\"utf-8\", \"replace\")\n elif not isinstance(value, basestring): # NOQA: F821\n value = unicode(value) # NOQA: F821\n\n return value", "def utf8tounicode(arg):\n\n try:\n if isinstance(arg, unicode):\n return arg.decode('utf-8')\n except NameError:\n pass # Python 3\n return arg", "def test_bytes2unicode():\n if backwards.PY2: # pragma: Python 2\n res = backwards.unicode_type('hello')\n backwards.assert_unicode(res)\n nt.assert_equal(backwards.bytes2unicode('hello'), res)\n nt.assert_equal(backwards.bytes2unicode(unicode('hello')), res)\n nt.assert_equal(backwards.bytes2unicode(bytearray('hello', 'utf-8')), res)\n nt.assert_raises(TypeError, backwards.bytes2unicode, 1)\n else: # pragma: Python 3\n res = 'hello'\n backwards.assert_unicode(res)\n nt.assert_equal(backwards.bytes2unicode('hello'), res)\n nt.assert_equal(backwards.bytes2unicode(b'hello'), res)\n nt.assert_equal(backwards.bytes2unicode(bytearray('hello', 'utf-8')), res)\n nt.assert_raises(TypeError, backwards.bytes2unicode, 1)", "def text_(s, encoding='utf-8', errors='strict'):\n if isinstance(s, binary_type):\n return s.decode(encoding, errors)\n return s # pragma: no cover", "def as_bytes(string):\n return same_string_type_as(b\"\", string)", "def to_unicode(text, encoding='utf8', errors='strict'):\n if isinstance(text, str):\n return text\n return str(text, encoding, errors=errors)", "def bytes_to_str(s, encoding='utf-8'):\n if isinstance(s, bytes):\n return s.decode(encoding)\n return s", "def force_unicode(s, encoding=encoding, errors='strict'):\n if isinstance(s, unicode):\n return s\n elif hasattr(s, '__unicode__'):\n return unicode(s)\n elif isinstance(s, str):\n return s.decode(encoding, errors)\n else:\n return str(s).decode(encoding, errors)", "def to_unicode(text, encoding='utf8', errors='strict'):\n if isinstance(text, unicode):\n return text\n else:\n return unicode(text, encoding=encoding, errors=errors)", "def s2b(s):\n return s.encode('utf-8')", "def to_unicode(text, encoding='utf8', errors='strict'):\r\n if isinstance(text, unicode):\r\n return text\r\n return unicode(text, encoding, errors=errors)", "def _force_unicode(data):\n try:\n data = unicode(data, \"utf-8\")\n except UnicodeDecodeError:\n data = unicode(data, \"latin1\")\n return data", "def appium_bytes(value, encoding):\n\n try:\n return bytes(value, encoding) # Python 3\n except TypeError:\n return value # Python 2", "def force_unicode(s):\n return (s.decode('utf8')\n if isinstance(s, str)\n else unicode(s))", "def text_to_bytes(text, encoding='UTF-8', size=None):\n res = str(text).encode(encoding)\n if size:\n res = res.rjust(size, b'\\x00')\n return res", "def bytes2str(val):\n if isinstance(val, bytes):\n return str(val, \"utf8\")\n else:\n return val", "def encode(value, encoding='utf-8') -> bytes:\n return value if isinstance(value, bytes) else str(value).encode(encoding)", "def ensure_native_str(data, encoding=\"utf8\"):\n if isinstance(data, str):\n return data\n elif isinstance(data, unicode_type):\n # Only executes on python 2\n return data.encode(encoding)\n elif isinstance(data, bytes):\n # Only executes on python 3\n return data.decode(encoding)\n else:\n str(data)", "def ensure_binary(s, encoding='utf-8', errors='strict'):\n if isinstance(s, six.text_type):\n return s.encode(encoding, errors)\n elif isinstance(s, six.binary_type):\n return s\n else:\n raise TypeError(\"not expecting type 
'%s'\" % type(s))", "def test_unicode_converted_to_utf8(self):\n class ResponseGenerator(object):\n def __init__(self):\n self.requests = []\n\n def response(self, *args, **kwargs):\n self.requests.append((args, kwargs))\n return MockRequestsResponse(200, content=\"Success!\")\n\n generator = ResponseGenerator()\n url = \"http://foo\"\n response = HTTP._request_with_timeout(\n url, generator.response, url, \"POST\",\n headers = { u\"unicode header\": u\"unicode value\"},\n data=u\"unicode data\"\n )\n [(args, kwargs)] = generator.requests\n url, method = args\n headers = kwargs['headers']\n data = kwargs['data']\n\n # All the Unicode data was converted to bytes before being sent\n # \"over the wire\".\n for k,v in headers.items():\n assert isinstance(k, bytes)\n assert isinstance(v, bytes)\n assert isinstance(data, bytes)", "def force_text(s: Union[str, bytes], encoding=\"utf-8\", errors=\"strict\") -> str:\n if isinstance(s, str):\n return s\n\n try:\n if not isinstance(s, str):\n if isinstance(s, bytes):\n s = str(s, encoding, errors)\n else:\n s = str(s)\n else:\n s = s.decode(encoding, errors)\n except UnicodeDecodeError as e:\n raise e\n return s", "def to_unicode(string):\n assert isinstance(string, basestring)\n if sys.version_info[0] >= 3:\n if isinstance(string, bytes):\n return string.decode('utf-8')\n else:\n return string\n else:\n if isinstance(string, str):\n return string.decode('utf-8')\n else:\n return string", "def bytes_string_to_bytes(bytes_string):\n return bytes(bytes_string.split(\"'\")[1], 'utf-8')", "def recodeToUtf8(data):\n try:\n data = data.decode('utf8').encode('utf8')\n return data\n except UnicodeDecodeError:\n encoding = chardet.detect(data)['encoding']\n logging.log(5, 'encoding should be %s' % encoding)\n if encoding == None:\n encoding = 'latin1'\n try:\n data = data.decode(encoding).encode('utf8')\n except UnicodeDecodeError:\n logging.warn('Error when decoding as %s' % encoding)\n data = data\n except LookupError:\n logging.warn('Unknown encoding when decoding as %s' % encoding)\n data = data\n\n return data\n\n return", "def _text(string, encoding='utf8'):\n if isinstance(string, six.text_type):\n return string\n elif isinstance(string, six.binary_type):\n return string.decode(encoding)\n else:\n return six.text_type(string)", "def _bytes_bytearray_to_str(s):\n if isinstance(s, (bytes, bytearray)):\n return s.decode()\n return s", "def FromUnicode(val):\n if sys.version_info[0] >= 3:\n return val\n return val if isinstance(val, str) else val.encode('utf-8')", "def write_utf8_string(self, u):\n if isinstance(u, six.text_type):\n u = u.encode(\"utf-8\")\n elif isinstance(u, bytearray):\n u = six.binary_type(u)\n if not isinstance(u, six.binary_type):\n raise TypeError(\"Expected a string, got %r\" % (u,))\n self.write(u)", "def to_utf8(x):\r\n if isinstance(x, basestring): \r\n return x.encode('utf-8') if isinstance(x, unicode) else x\r\n try:\r\n l = iter(x)\r\n except TypeError:\r\n return x\r\n return [to_utf8(i) for i in l]", "def _decode_utf8(value):\n try:\n return value if not isinstance(value, bytes) else value.decode('utf-8', 'ignore')\n except UnicodeDecodeError:\n return None", "def str_to_unicode(text, encoding='utf-8'):\n if isinstance(text, str):\n return text.decode(encoding)\n elif isinstance(text, unicode):\n return text\n else:\n raise TypeError('str_to_unicode must receive a str or unicode object, got %s' % type(text).__name__)", "def to_unicode(value, default=u''):\n try:\n if isinstance(value, unicode):\n return value\n return 
codecs.decode(value or default)\n except Exception:\n return codecs.decode(value or default, 'latin1')", "def to_unicode(x):\n try: # This may never fail, but let's be safe\n encoding = locale.getpreferredencoding()\n except:\n encoding = 'utf-8'\n ret = x.decode(encoding, 'replace').encode('utf-8')\n return ret", "def as_unicode(value):\n assert value is None or isinstance(value,types.StringTypes)\n if isinstance(value,types.StringType):\n return value.decode('utf-8')\n else:\n return value", "def _encode_safely(s):\n if isinstance(s, unicode):\n s = s.encode('utf-8')\n return s" ]
[ "0.78761524", "0.78181183", "0.78134924", "0.77963704", "0.7775173", "0.77212256", "0.7695007", "0.76554793", "0.76551324", "0.76297563", "0.7587289", "0.75190413", "0.74554324", "0.74133134", "0.74104327", "0.74089986", "0.73655003", "0.73444015", "0.72940016", "0.72861797", "0.72822374", "0.7224943", "0.7195027", "0.7165689", "0.7132938", "0.7130741", "0.7130097", "0.71165246", "0.706306", "0.7029265", "0.7018126", "0.6986508", "0.6971141", "0.69654566", "0.6964847", "0.6934701", "0.693425", "0.6918395", "0.6914401", "0.6894375", "0.687751", "0.68580467", "0.6834378", "0.6834378", "0.6825647", "0.6814983", "0.68046194", "0.6787599", "0.6741475", "0.67329484", "0.6700458", "0.66911995", "0.66800547", "0.66766524", "0.6651788", "0.6613859", "0.66076404", "0.6601661", "0.6580877", "0.65653193", "0.65543246", "0.654984", "0.654984", "0.6544514", "0.6544514", "0.6543795", "0.6542518", "0.65419686", "0.65339315", "0.653001", "0.651003", "0.65094477", "0.65040106", "0.6500664", "0.64875746", "0.6487112", "0.6479785", "0.64742124", "0.6467222", "0.646577", "0.64293593", "0.64085937", "0.64046025", "0.639463", "0.6392308", "0.63859874", "0.6368687", "0.6357256", "0.6349085", "0.6328104", "0.6315006", "0.6303929", "0.6302524", "0.63014907", "0.62895256", "0.6280823", "0.627481", "0.6263351", "0.62581366", "0.6257287" ]
0.7902959
0
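This record pairs the UTF-8 byte-conversion query with the `as_bytes` document above. As a minimal usage sketch, the function is reproduced as quoted and its three branches exercised; the `six` dependency matches the quoted code, and the sample inputs are illustrative assumptions.

```python
import six as _six

def as_bytes(bytes_or_text, encoding='utf-8'):
    # Text is UTF-8 encoded; bytes pass through; anything else is rejected.
    if isinstance(bytes_or_text, _six.text_type):
        return bytes_or_text.encode(encoding)
    elif isinstance(bytes_or_text, bytes):
        return bytes_or_text
    else:
        raise TypeError('Expected binary or unicode string, got %r'
                        % (bytes_or_text,))

assert as_bytes(u'caf\u00e9') == b'caf\xc3\xa9'  # text -> UTF-8 bytes
assert as_bytes(b'raw') == b'raw'                # bytes are returned unchanged
try:
    as_bytes(123)
except TypeError:
    pass                                         # non-string input raises
```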
Returns the given argument as a unicode string.
def as_text(bytes_or_text, encoding='utf-8'): if isinstance(bytes_or_text, _six.text_type): return bytes_or_text elif isinstance(bytes_or_text, bytes): return bytes_or_text.decode(encoding) else: raise TypeError( 'Expected binary or unicode string, got %r' % bytes_or_text )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def safe_unicode(arg, *args, **kwargs):\n return arg if isinstance(arg, str) else str(arg, *args, **kwargs)", "def unicode2utf8(arg):\n\n try:\n if isinstance(arg, unicode):\n return arg.encode('utf-8')\n except NameError:\n pass # Python 3\n return arg", "def __str__(self):\n if not six.PY3:\n return unicode(self.args[0]).encode('utf-8')\n\n return self.args[0]", "def utf8tounicode(arg):\n\n try:\n if isinstance(arg, unicode):\n return arg.decode('utf-8')\n except NameError:\n pass # Python 3\n return arg", "def plain_text( argument ):\n return str( argument )", "def encode_arg(arg):\n arg_utf8 = utf8(arg)\n\n return ELEM_SEP.join([str(len(str(arg_utf8))), str(arg_utf8)])", "def toString(self) -> unicode:\n ...", "def toString(self) -> unicode:\n ...", "def u(s):\n if _IS_PYTHON_3 or type(s) == unicode:\n return s\n else:\n return codecs.unicode_escape_decode(s)[0]", "def to_unicode(data):\n return to_string(data)", "def u(obj):\n return obj if isinstance(obj, unicode) else unicode(obj) # noqa: F821 pylint: disable=undefined-variable", "def encodeString(*args, **kwargs)->AnyStr:\n pass", "def unicode_parm(cap, *params):\n return tparm(tigetstr(cap), *params).decode('latin1')", "def __unicode__(self):\n return unicode(self.args[0])", "def safe_unicode(obj, *args):\n try:\n return unicode(obj, *args)\n except UnicodeDecodeError:\n # obj is byte string\n ascii_text = str(obj).encode('string_escape')\n return unicode(ascii_text)", "def safe_unicode(obj, *args):\n try:\n return unicode(obj, *args)\n except UnicodeDecodeError:\n # obj is byte string\n ascii_text = str(obj).encode('string_escape')\n return unicode(ascii_text)", "def safe_unicode(obj, *args):\n try:\n return unicode(obj, *args)\n except UnicodeDecodeError:\n # obj is byte string\n ascii_text = str(obj).encode('string_escape')\n return unicode(ascii_text)", "def safe_unicode(obj, *args):\n try:\n return unicode(obj, *args)\n except UnicodeDecodeError:\n # obj is byte string\n ascii_text = str(obj).encode('string_escape')\n return unicode(ascii_text)", "def safe_unicode(obj, *args):\n try:\n return unicode(obj, *args)\n except UnicodeDecodeError:\n # obj is byte string\n ascii_text = str(obj).encode('string_escape')\n return unicode(ascii_text)", "def safe_unicode(obj, *args):\n try:\n return unicode(obj, *args)\n except UnicodeDecodeError:\n # obj is byte string\n ascii_text = str(obj).encode('string_escape')\n return unicode(ascii_text)", "def astr(obj):\n\treturn unicode(obj).encode(\"ascii\", \"replace\")", "def su(value):\n return safe_unicode(value, encoding=get_charset())", "def u(obj):\n return obj if isinstance(obj, str) else str(obj)", "def utfstr(stuff):\n if isinstance(stuff,basestring):\n return stuff\n else:\n return str(stuff)", "def _unicodeify(self, value, encoding=\"utf8\"):\n if isinstance(value, str):\n return value\n return str(value, encoding)", "def _escape(arg):\n if type(arg) == str:\n arg = \"'%s'\" % _escape_string(arg)\n elif type(arg) == unicode:\n arg = \"'%s'\" % _escape_string(arg).encode('utf8')\n elif arg is None:\n arg = 'null'\n else:\n arg = str(arg)\n return arg", "def xstr(s):\n if s is None:\n return u''\n return s", "def _func_serialize(self, args): # pragma: no cover\n return repr(args).encode(\"utf-8\")", "def _tostr(t):\n\treturn t.__unicode__()", "def EncodePOSIXShellArgument(argument):\n\n if not isinstance(argument, str):\n argument = str(argument)\n\n if _quote.search(argument):\n quote = '\"'\n else:\n quote = ''\n\n encoded = quote + re.sub(_escape, r'\\\\\\1', argument) + 
quote\n\n return encoded", "def as_unicode(string):\n return same_string_type_as(\"\", string)", "def to_unicode(value):\r\n if isinstance(value, _TO_UNICODE_TYPES):\r\n return value\r\n if not isinstance(value, bytes_type):\r\n raise TypeError(\r\n \"Expected bytes, unicode, or None; got %r\" % type(value)\r\n )\r\n return value.decode(\"utf-8\")", "def text(self, value):\n return unicode(value) if sys.version_info < (3,) else str(value)", "def func_serialize(self, args): # pragma: no cover\n return repr(args).encode(\"utf-8\")", "def getUniStr(self):\n return(\"%s/%s\"%(self.token.id,self.type))", "def _printstr(self, args):\n s = \"\\n\"\n\n for arg in args:\n #s += arg.encode('utf-8', 'pout.replace')\n s += arg\n\n return s", "def safe_unicode(e):\n try:\n return str(e)\n except UnicodeError:\n pass\n\n try:\n return repr(e)\n except UnicodeError:\n pass\n\n return u'Unrecoverably corrupt evalue'", "def __unicode__(self):\n\t\targs = [str(arg) for arg in self.arguments]\n\t\targs = \", \".join(args)\n\t\tmsg = 'Placeholder function call \"%s\"(%s)' % (self.name, args)\n\t\tif self.namespace_parts:\n\t\t\tns = self._namespace_to_unicode()\n\t\t\tmsg = \"%s at %s\" % (msg, ns)\n\t\treturn msg", "def ltibetanToUnicode(*args):\n import string\n\n # The context variable is of type XScriptContext and is available to\n # all BeanShell scripts executed by the Script Framework\n xModel = XSCRIPTCONTEXT.getDocument()\n\n #the writer controller impl supports the css.view.XSelectionSupplier interface\n xSelectionSupplier = xModel.getCurrentController()\n\n #see section 7.5.1 of developers' guide\n xIndexAccess = xSelectionSupplier.getSelection()\n count = xIndexAccess.getCount();\n if(count>=1): #ie we have a selection\n i=0\n\twhile i < count :\n xTextRange = xIndexAccess.getByIndex(i);\n theString = xTextRange.getString();\n if len(theString)!=0 :\n newString = getNewString( theString );\n if newString:\n xTextRange.setString(newString);\n xSelectionSupplier.select(xTextRange);\n\t i+= 1", "def make_unicode(string):\n if sys.version < '3' and isinstance(string, str):\n return unicode(string.decode('utf-8'))\n\n return string", "def UnitKind_toString(*args):\n return _libsbml.UnitKind_toString(*args)", "def utf8(value):\r\n if isinstance(value, _UTF8_TYPES):\r\n return value\r\n elif isinstance(value, unicode_type):\r\n return value.encode(\"utf-8\")\r\n else:\r\n return str(value)", "def touni(x, enc='utf8', err='strict'):\r\n return x if isinstance(x, unicode) else unicode(str(x), enc, err)", "def as_unicode(obj):\n if sys.version_info.major < 3 and isinstance(obj, str):\n obj = obj.decode('utf-8')\n return unicode(obj)", "def _index_to_unicode(cls, index: int) -> str:\n return \"\".join(cls._unicode_subscripts[int(_)] for _ in str(index))", "def getUniStr(self):\n return(\"%s.%s.%s-%s\"%(self.getPrefix,self.getSentenceId,\n self.offset_bgn,self.offset_end))", "def Astr(string):\n\n return unicode(string, encoding='utf-8')", "def encode_unicode_string(string, length=None):\n\t\n\tif string is None:\n\t\tstring = u''\n\treturn encode_string(string.encode('utf_8'), length)", "def display_unicode(self, string):\n if string is None:\n return ''\n return string.decode(\"utf16\", \"ignore\").encode(\"ascii\", 'backslashreplace')", "def ustr(obj):\n if IS_PY2:\n # If we are getting a string, then do an explicit decode\n # else, just call the unicode method of the object\n if type(obj) in [str, basestring]: # pragma: no cover # noqa\n return unicode(obj, DEFAULT_ENCODING) # pragma: no cover # noqa\n 
else:\n return unicode(obj) # pragma: no cover # noqa\n else:\n if type(obj) in [bytes]:\n return obj.decode(DEFAULT_ENCODING)\n else:\n return str(obj)", "def to_unicode(value, default=u''):\n try:\n if isinstance(value, unicode):\n return value\n return codecs.decode(value or default)\n except Exception:\n return codecs.decode(value or default, 'latin1')", "def to_unicode(s):\n if isinstance(s, basestring):\n return force_unicode(s)\n return s", "def runAsString(self, args):\n\t\toutput=BytesIO()\n\t\tself.run(args, output)\n\t\treturn output.getvalue().decode(u'utf-8')", "def as_unicode(value):\n assert value is None or isinstance(value,types.StringTypes)\n if isinstance(value,types.StringType):\n return value.decode('utf-8')\n else:\n return value", "def utf8(value):\n if isinstance(value, (bytes, type(None))):\n return value\n if not isinstance(value, unicode_type):\n raise TypeError(\n \"Expected bytes, unicode, or None; got %r\" % type(value)\n )\n return value.encode(\"utf-8\")", "def to_unicode(x):\n try: # This may never fail, but let's be safe\n encoding = locale.getpreferredencoding()\n except:\n encoding = 'utf-8'\n ret = x.decode(encoding, 'replace').encode('utf-8')\n return ret", "def GetString(*args, **kwargs):\n return _gdi_.Locale_GetString(*args, **kwargs)", "def _str_args(self):\n return \"\"", "def stringToUnicode(x):\n if sys.version < '3':\n import codecs\n return codecs.unicode_escape_decode(x)[0]\n return x", "def ToUnicode(val):\n if sys.version_info[0] >= 3:\n return val\n return val if isinstance(val, unicode) else val.decode('utf-8')", "def unicode_quote(value):\n return quote(value.encode('utf-8'))", "def args_str(self):", "def as_utf8(value):\n assert value is None or isinstance(value,types.StringTypes)\n if isinstance(value,types.UnicodeType):\n return value.encode('utf-8')\n else:\n return value", "def getUniStr(self):\n return(\"%s/%s\"%(Entity.getUniStr(self),self.semanticId))", "def touni(x, enc='utf8', err='strict'):\r\n return str(x, enc, err) if isinstance(x, bytes) else str(x)", "def getString(self, name: unicode) -> unicode:\n ...", "def format_arg(arg_name: str, value: Any, max_length: int = 200) -> str:\n return \"{arg_name}={value}\".format(\n arg_name=arg_name, value=trim_string(repr(value), max_length=max_length)\n )", "def string_unicode(text, encoding='utf-8'):\n try:\n if sys.version_info[0] >= 3:\n text = str(text)\n else:\n text = unicode(text, encoding) # pylint: disable=undefined-variable\n except: # pylint: disable=bare-except\n pass\n return text", "def to_unicode(string):\n assert isinstance(string, basestring)\n if sys.version_info[0] >= 3:\n if isinstance(string, bytes):\n return string.decode('utf-8')\n else:\n return string\n else:\n if isinstance(string, str):\n return string.decode('utf-8')\n else:\n return string", "def to_unicode(data):\n if isinstance(data, bytes):\n return data.decode('utf-8')\n else:\n return data", "def params_to_arg_string(**params):\n\targs = params_to_args(**params)\n\treturn ' '.join(args)", "def get_string(self, **kwargs):\n ...", "def native_string(input_var):\n if isinstance(input_var, str):\n return input_var\n\n return input_var.decode('utf-8', 'replace')", "def _sanitize_param(self, param):\n if param:\n # Can't send unicode.\n param = str(param)\n return param", "def get_safe_ex_string(ex, encoding=None):\n\n retVal = ex\n\n if getattr(ex, \"message\", None):\n retVal = ex.message\n elif getattr(ex, \"msg\", None):\n retVal = ex.msg\n return retVal.strip()\n # return getUnicode(retVal or \"\", 
encoding=encoding).strip()", "def __str__(self):\n return unicode(self).encode('utf-8')", "def create_action_id(*args):\r\n return u'-'.join([unicode(arg) for arg in args])", "def to_unicode(text, encoding='utf8', errors='strict'):\n if isinstance(text, str):\n return text\n return str(text, encoding, errors=errors)", "def __unicode__(self):\n return u'%s' % str(self)", "def str(x) -> String:\n pass", "def format_arg(namespace, arg, lex):\n if lex and arg[0] in ('[', '('):\n return arg[0] + namespace + arg[1:]\n try:\n return namespace + arg\n except:\n return str.encode(namespace) + arg", "def lstr (obj):\n\n cmdlenc = locale.getdefaultlocale()[1]\n return repr(obj).decode(\"unicode_escape\").encode(cmdlenc)", "def asunicode(s):\n if isinstance(s, bytes):\n return s.decode('utf-8', 'replace')\n else:\n return s", "def _escapeArg(arg):\n #XXX There is a *lot* more that we should escape here.\n return arg.replace('\"', r'\\\"')", "def ensure_unicode_string(value):\n if not isinstance(value, six.string_types):\n raise TypeError(u'Expected string value, got: {}'.format(value))\n return six.text_type(value)", "def utf8(value):\r\n if isinstance(value, six.text_type):\r\n return value.encode('utf-8')\r\n assert isinstance(value, str)\r\n return value", "def get_argument_string(self, arguments):\n fixed_arguments = self._repopulate_required_arguments(arguments)\n return ' '.join(fixed_arguments)", "def FromUnicode(val):\n if sys.version_info[0] >= 3:\n return val\n return val if isinstance(val, str) else val.encode('utf-8')", "def encode_string(self, value):\r\n if not isinstance(value, str): return value\r\n try:\r\n return unicode(value, 'utf-8')\r\n except: # really, this should throw an exception.\r\n # in the interest of not breaking current\r\n # systems, however:\r\n arr = []\r\n for ch in value:\r\n arr.append(unichr(ord(ch)))\r\n return u\"\".join(arr)", "def bytesToStr(self, argvbytes, hexformat):\r\n msg = bytes(argvbytes)\r\n if hexformat:\r\n s = \"\"\r\n for i in range(len(msg)):\r\n hhex = \"%02x\" % msg[i]\r\n s += hhex + ' '\r\n return s\r\n else:\r\n return msg.decode(\"utf-8\")", "def __unicode__(self):\n return unicode(self).encode('utf-8')", "def text(self, argument):\n argument = Base.check_string_param(argument)\n result = ctypes.c_char_p(self.dss_obj.DSSPut_Command(argument.encode('ascii')))\n return result.value.decode(\"ascii\")", "def to_unicode(text, encoding='utf8', errors='strict'):\n if isinstance(text, unicode):\n return text\n else:\n return unicode(text, encoding=encoding, errors=errors)", "def unicode_of_unit(quant):\n return quant.dimensionality.unicode", "def to_unicode(text, encoding='utf8', errors='strict'):\n if isinstance(text, unicode):\n return text\n return unicode(text, encoding, errors=errors)", "def test_to_unicode_raises_on_non_string():\n with pytest.raises(TypeError):\n to_unicode(999)", "def to_unicode(text, encoding='utf8', errors='strict'):\r\n if isinstance(text, unicode):\r\n return text\r\n return unicode(text, encoding, errors=errors)", "def cmd(*args):\r\n return \" \".join([str(arg) for arg in args])", "def to_unicode(s, encoding=\"utf-8\"):\n if isinstance(s, six.text_type):\n return s\n elif isinstance(s, bytes):\n return s.decode(encoding)\n # TODO: warning? 
Exception?\n return s", "def qstringToString( text ):\n\treturn unicode( text.toUtf8(), \"utf-8\" )\n\t# try:\n\t\t# return str( text.toUtf8() )\n\t# except UnicodeDecodeError:\n\t\t# return unicode( text.toUtf8(), \"utf-8\" )", "def getUniStr(self):\n return('-'.join([\"%s-%s\"%(x.id,(x.id in self.special))\n for x in self.getNested()]))" ]
[ "0.7720573", "0.7472295", "0.7162145", "0.7126575", "0.6827276", "0.66389793", "0.63655776", "0.63655776", "0.6295255", "0.62470526", "0.62246895", "0.61486644", "0.6147634", "0.6133242", "0.6103886", "0.6103886", "0.6103886", "0.6103886", "0.6103886", "0.6103886", "0.6079908", "0.607562", "0.6062363", "0.60452694", "0.6025831", "0.60025483", "0.59882593", "0.5933058", "0.59118813", "0.5906593", "0.59053713", "0.5901774", "0.588746", "0.5882539", "0.5861257", "0.5848872", "0.58204514", "0.58192396", "0.5814272", "0.5807079", "0.5789635", "0.57545394", "0.57501096", "0.5749157", "0.5731677", "0.5726191", "0.57244897", "0.57116014", "0.5709693", "0.570937", "0.5702222", "0.56932366", "0.56737626", "0.56671447", "0.5666247", "0.5663837", "0.56592095", "0.5650999", "0.56380194", "0.5630125", "0.5590816", "0.5589214", "0.5566382", "0.5557381", "0.55511194", "0.5550737", "0.55495626", "0.5533575", "0.5530098", "0.55278945", "0.55117625", "0.5506942", "0.54908603", "0.5487105", "0.54720193", "0.54641986", "0.5462562", "0.5457649", "0.5456235", "0.54357433", "0.54245275", "0.5416949", "0.5416659", "0.54080206", "0.5399733", "0.5399366", "0.539579", "0.5393124", "0.5390819", "0.537919", "0.5368594", "0.53640836", "0.53554416", "0.53543454", "0.5345738", "0.53448397", "0.53434676", "0.5333849", "0.5327685", "0.5327061", "0.5325859" ]
0.0
-1
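This record pairs the unicode-string query with the `as_text` document above, the decoding counterpart of `as_bytes`. A minimal usage sketch follows, again reproducing the quoted function so the example is self-contained; the `six` import matches the quoted code and the sample strings are assumptions.

```python
import six as _six

def as_text(bytes_or_text, encoding='utf-8'):
    # Bytes are decoded with the given encoding; text passes through.
    if isinstance(bytes_or_text, _six.text_type):
        return bytes_or_text
    elif isinstance(bytes_or_text, bytes):
        return bytes_or_text.decode(encoding)
    else:
        raise TypeError(
            'Expected binary or unicode string, got %r' % bytes_or_text
        )

assert as_text(b'caf\xc3\xa9') == u'caf\u00e9'      # bytes -> text via UTF-8
assert as_text(u'already text') == u'already text'  # text is returned as-is
```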
Converts to `str` as `str(value)`, but use `as_str` for `bytes`.
def as_str_any(value): if isinstance(value, bytes): return as_str(value) else: return str(value)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_string(value):\n if isinstance(value, encodingutils.binary_type):\n return encodingutils.bytes_to_string(value)\n else:\n return encodingutils.text_type(value)", "def convert_to_string(value: Any) -> str:\n if isinstance(value, str):\n return value\n\n if isinstance(value, bytes):\n return value.decode(\"utf-8\")\n\n return str(value)", "def to_str(value: bytes) -> str:\n if value is not None:\n out = value.decode(\"utf-8\")\n else:\n out = \"\"\n\n return out", "def bytes2str(val):\n if isinstance(val, bytes):\n return str(val, \"utf8\")\n else:\n return val", "def _to_bytes(value: Union[str, bytes]) -> bytes:\n return value if isinstance(value, bytes) else value.encode(\"utf-8\")", "def to_bytes(value: str) -> bytes:\n if value is not None:\n out = value.encode(\"utf-8\")\n else:\n out = b\"\"\n\n return out", "def smart_str(value, encoding='utf-8', errors='strict'):\n if not IS_PY3 and isinstance(value, unicode):\n return value.encode(encoding, errors)\n return str(value)", "def to_basestring(value):\r\n if value is None:\r\n return 'None'\r\n if isinstance(value, _BASESTRING_TYPES):\r\n return value\r\n elif isinstance(value, unicode_type):\r\n return value.decode(\"utf-8\")\r\n else:\r\n return str(value)", "def utf8(value):\r\n if isinstance(value, _UTF8_TYPES):\r\n return value\r\n elif isinstance(value, unicode_type):\r\n return value.encode(\"utf-8\")\r\n else:\r\n return str(value)", "def tostring(b):\n if isinstance(b, bytes):\n return bytes2str(b)\n else:\n return b", "def _unicodeify(self, value, encoding=\"utf8\"):\n if isinstance(value, str):\n return value\n return str(value, encoding)", "def decode_to_string(self, value):\n #if python3 or python 2.7\n ret = bytearray(value).decode(encoding='UTF-8')\n #if python2.7\n #ret = str(bytearray(value))\n return ret", "def encode(value, encoding='utf-8') -> bytes:\n return value if isinstance(value, bytes) else str(value).encode(encoding)", "def as_string(self, value, context=None):\n return str(value)", "def convert_to_string(value):\n if isinstance(value, str):\n return value\n # Boolean test must come before integer check!\n elif isinstance(value, bool):\n return str(value).lower()\n elif isinstance(value, int):\n return str(value)\n elif isinstance(value, float):\n return str(value)\n elif isinstance(value, UTCDateTime):\n return str(value).replace(\"Z\", \"\")\n else:\n raise TypeError(\"Unexpected type %s\" % repr(value))", "def as_utf8(value):\n assert value is None or isinstance(value,types.StringTypes)\n if isinstance(value,types.UnicodeType):\n return value.encode('utf-8')\n else:\n return value", "def string(self, value):\n # respect {None}\n if value is None:\n # by leaving it alone\n return None\n # my value knows\n return str(value)", "def transform_python(self, value):\n return str(value)", "def to_str(v, encode=None):\n if isinstance(v, basestring_type):\n return v\n\n if isinstance(v, dict):\n return dict_to_str(v, encode)\n\n if isinstance(v, Iterable):\n return list_to_str(v, encode)\n\n if encode:\n return encode(v)\n else:\n return v", "def _encode(self, value):\n if value is None:\n return value\n if isinstance(value, six.binary_type):\n return value\n return value.encode(\"utf-8\")", "def ToString():\n @pass_failures\n def to_string(data):\n value = data.value\n if isinstance(value, Mapping):\n value = {k: str(v) for k, v in value.items()}\n else:\n value = str(value)\n data.value = value\n return data\n return to_string", "def _bytes_bytearray_to_str(s):\n if isinstance(s, (bytes, bytearray)):\n 
return s.decode()\n return s", "def bytes_to_str(s, encoding='utf-8'):\n if isinstance(s, bytes):\n return s.decode(encoding)\n return s", "def to_string(value: Any) -> str:\n return StringConverter.to_string_with_default(value, '')", "def to_text(value, encoding='utf-8'):\n if not value:\n return ''\n if isinstance(value, six.text_type):\n return value\n if isinstance(value, six.binary_type):\n return value.decode(encoding)\n return six.text_type(value)", "def utf8(value):\r\n if isinstance(value, six.text_type):\r\n return value.encode('utf-8')\r\n assert isinstance(value, str)\r\n return value", "def force_bytes(value):\n if IS_PY3:\n if isinstance(value, str):\n value = value.encode(\"utf-8\", \"backslashreplace\")\n else:\n if isinstance(value, unicode): # NOQA: F821\n value = value.encode(\"utf-8\")\n\n return value", "def ensure_bytes(value: AnyStr) -> bytes:\n if isinstance(value, bytes):\n return value\n if isinstance(value, str):\n return value.encode('utf-8')\n raise TypeError(f\"input must be str or bytes, got {type(value).__name__}\")", "def clean_value(self, value):\n if isinstance(value, bytes):\n return value.decode('utf-8')\n else:\n return str(value)", "def to_str(value):\n if value is None:\n return \"\"\n if str(value) == value:\n return value\n try:\n return value.to_str()\n except AttributeError:\n try:\n return \"\\n\".join(to_str(v) for v in value)\n except TypeError:\n return str(value)", "def _tostr(obj): # pragma: no cover\n return obj if isinstance(obj, str) else obj.decode()", "def _to_str(s, encoding=\"utf8\", errors=\"ignore\"):\n if isinstance(s, bytes):\n return s.decode(encoding=encoding, errors=errors)\n return str(s)", "def as_str(self):\n return self.as_type(str)", "def serialize_to_python(cls, value):\n if isinstance(value, bytes):\n value = value.decode('utf-8')\n\n result = repr(value)\n\n if six.PY2 and result.startswith('u'):\n # Make sure we're getting the real Unicode values out, and not\n # string escapes.\n #\n # Users will need to add a \"coding: utf-8\" to the file, if\n # Unicode characters are present and they care about support\n # for Python 2.7.\n result = result[1:].decode('unicode-escape')\n\n return result", "def bytes_(s: Any, encoding: str = 'utf-8', errors: str = 'strict') -> Any:\n if isinstance(s, int):\n s = str(s)\n if isinstance(s, str):\n return s.encode(encoding, errors)\n return s", "def bytes_(s: Any, encoding: str = 'utf-8', errors: str = 'strict') -> Any:\n if isinstance(s, int):\n s = str(s)\n if isinstance(s, str):\n return s.encode(encoding, errors)\n return s", "def bytes_to_str(s, encoding='utf-8'):\n if six.PY3 and isinstance(s, bytes):\n return s.decode(encoding)\n return s", "def bytes_to_str(s, encoding='utf-8'):\n if six.PY3 and isinstance(s, bytes):\n return s.decode(encoding)\n return s", "def str_to_python(self, value):\r\n return unicode_safe(value)", "def ToString(bval):\n return bval.decode('utf-8')", "def serialize_to_python(cls, value):\n return repr(value)", "def to_string(self, name, value):\r\n \r\n return str(value)", "def bytes_to_str(self, data):\n if isinstance(data, str):\n return data\n return data.decode(\"utf-8\")", "def as_str(self) -> str:\n if isinstance(self.data, str):\n return self.data\n elif isinstance(self.data, bytes):\n return self.data.decode()\n else:\n return bytes(self.data).decode()", "def bytes2str(data):\n # pylint: disable=multiple-statements\n\n if isinstance(data, bytes): return data.decode('utf-8')\n if isinstance(data, dict): return dict(map(bytes2str, data.items()))\n 
if isinstance(data, tuple): return map(bytes2str, data)\n return data", "def make_string(value):\n if value:\n return str(value)\n return None", "def headerValueAsBytes(value):\n # type: (String) -> bytes\n if isinstance(value, bytes):\n return value\n else:\n return value.encode(HEADER_VALUE_ENCODING)", "def asbytes(s):\n if isinstance(s, bytes):\n return s\n else:\n return s.encode('utf-8')", "def utf8(value):\n if isinstance(value, (bytes, type(None))):\n return value\n if not isinstance(value, unicode_type):\n raise TypeError(\n \"Expected bytes, unicode, or None; got %r\" % type(value)\n )\n return value.encode(\"utf-8\")", "def _stringify(obj):\r\n if isinstance(obj, unicode):\r\n return obj.encode('utf-8')\r\n elif isinstance(obj, str):\r\n return obj\r\n else:\r\n raise TypeError('Object is not a string.')", "def toString(s):\n if type(s) == type(\"\"):\n return s\n else:\n return s.decode()", "def encode_string(self, value):\r\n if not isinstance(value, str): return value\r\n try:\r\n return unicode(value, 'utf-8')\r\n except: # really, this should throw an exception.\r\n # in the interest of not breaking current\r\n # systems, however:\r\n arr = []\r\n for ch in value:\r\n arr.append(unichr(ord(ch)))\r\n return u\"\".join(arr)", "def _as_bytes(s):\n if isinstance(s, bytes):\n return s\n return bytes(s, encoding='latin_1')", "def force_str(s):\n return (s.encode('utf8')\n if isinstance(s, unicode)\n else str(s))", "def value_str(self):\n return self._to_str(self.value)", "def force_unicode(value):\n if IS_PY3:\n # Python 3.X\n if isinstance(value, bytes):\n value = value.decode(\"utf-8\", errors=\"replace\")\n elif not isinstance(value, str):\n value = str(value)\n else:\n # Python 2.X\n if isinstance(value, str):\n value = value.decode(\"utf-8\", \"replace\")\n elif not isinstance(value, basestring): # NOQA: F821\n value = unicode(value) # NOQA: F821\n\n return value", "def str_value(self, data):\n return str(self.value(data))", "def to_str(source: Union[str, bytes, IO[bytes]]) -> str:\n if isinstance(source, str):\n return source\n elif isinstance(source, bytes):\n # XXX: Assume it's UTF-8 encoded!\n return source.decode('UTF-8')\n else:\n raise NotImplementedError", "def asString(obj):\n if type(obj) in _STR_TYPES:\n return obj\n return str(obj)", "def pack_str(value: str) -> bytes:\n str_bytes = value.encode('utf-8')\n length_bytes = _pack_length(str_bytes)\n return length_bytes + str_bytes", "def to_bytes(bytes_or_str):\r\n if isinstance(bytes_or_str, str):\r\n value = bytes_or_str.encode('utf-8')\r\n else:\r\n value = bytes_or_str\r\n return value", "def _encode_value(self, value):\n return pickle.dumps(value)", "def convert(cls, value: Any) -> Optional[str]:\n # Can be optional\n if value is None:\n return None\n\n cls.assert_value_ok(isinstance(value, str), value)\n\n return value", "def bytes_to_str(b, encoding='ascii'):\n return b.decode(encoding)", "def convert_to_string(_bytes: bytes)-> str:\n # print('input bytes: ', _bytes)\n # print('string: ', binascii.hexlify(_bytes))\n # print('string2: ', _bytes.hex())\n # print('string3: ', \" \".join([\"{:02x}\".format(x) for x in _bytes]))\n return \" \".join([\"{:02x}\".format(x) for x in _bytes])", "def ToBytes(value) -> bytes:\n return _GetFactory(type(value)).ToBytes(value)", "def format_value(self, val):\n\n if isinstance(val, (unicode, str)):\n val = self.standardise_quotes(val)\n \n try:\n val = unicode(val)\n except UnicodeDecodeError:\n # obj is byte string\n ascii_text = str(val).encode('string_escape')\n val = 
unicode(ascii_text)\n\n return val", "def as_unicode(value):\n assert value is None or isinstance(value,types.StringTypes)\n if isinstance(value,types.StringType):\n return value.decode('utf-8')\n else:\n return value", "def ensure_unicode_string(value):\n if not isinstance(value, six.string_types):\n raise TypeError(u'Expected string value, got: {}'.format(value))\n return six.text_type(value)", "def _force_string(x):\n if isinstance(x, basestring):\n return x\n else:\n return str(x)", "def getBytesIOString(bytesIO):\n if _BytesIOValueIsStr:\n # We don't need to convert.\n return bytesIO.getvalue()\n else:\n # Assume value is a Python 3 bytes object. Convert to str.\n return \"\".join(map(_bytesElementToChr, bytesIO.getvalue()))", "def escape(self, value) -> str:\n def to_str(val):\n if isinstance(val, bytes):\n val = val.decode('utf-8')\n return QuotedString(val).getquoted().decode('utf-8')\n func = self.python_type\n if isinstance(value, (datetime.datetime, datetime.date)):\n value = str(value)\n func = to_str\n if issubclass(self.python_type, str):\n func = to_str\n return func(value)", "def sstr(obj):\n if IS_PY2:\n # For lists and tuples in python2, remove unicode string representation characters.\n # i.e. ensure lists are printed as ['a', 'b'] and not [u'a', u'b']\n if type(obj) in [list]:\n return [sstr(item) for item in obj] # pragma: no cover # noqa\n elif type(obj) in [tuple]:\n return tuple(sstr(item) for item in obj) # pragma: no cover # noqa\n\n return unicode(obj).encode(DEFAULT_ENCODING) # pragma: no cover # noqa\n else:\n return obj # pragma: no cover", "def pystr(s):\n if six.PY2 and isinstance(s, six.text_type):\n return s.encode('ascii', 'ignore')\n elif six.PY3 and isinstance(s, six.binary_type):\n return s.decode('utf-8')\n else:\n return s", "def convert_to_str(string):\n if type(string) is str:\n return string\n else:\n return bytes.decode(string)", "def _coerce_string_value(self, value):\n # coerce bool before int as python says a bool is an int\n if isinstance(value, bool):\n # coerce bool to str type\n self.log.warning(f'Coercing bool value ({value}) to a string (\"{str(value).lower()}\").')\n value = str(value).lower()\n\n # coerce int to str type\n if isinstance(value, (float, int)):\n self.log.warning(f'Coercing float/int value ({value}) to a string (\"{str(value)}\").')\n value = str(value)\n\n return value", "def smart_str(s):\n # Handle the common case first for performance reasons.\n if issubclass(type(s), str) or isinstance(s, _PROTECTED_TYPES):\n return s\n if isinstance(s, bytes):\n return str(s, 'utf-8')\n return str(s)", "def _convertUnicodeForCPS(self, value):\n try:\n value = str(value)\n except UnicodeEncodeError:\n try:\n value = str(value.encode('ISO-8859-15'))\n except UnicodeEncodeError:\n value = repr(value)\n return value", "def to_str(s, encoding='utf-8', strings_only=False, errors='strict'):\n\n if strings_only and isinstance(s, (types.NoneType, int)):\n return s\n\n if not isinstance(s, basestring):\n try:\n return str(s)\n except UnicodeEncodeError:\n if isinstance(s, Exception):\n # An Exception subclass containing non-ASCII data that doesn't\n # know how to print itself properly. 
We shouldn't raise a\n # further exception.\n return ' '.join([smart_str(arg, encoding, strings_only,\n errors) for arg in s])\n return unicode(s).encode(encoding, errors)\n elif isinstance(s, unicode):\n return s.encode(encoding, errors)\n elif s and encoding != 'utf-8':\n return s.decode('utf-8', errors).encode(encoding, errors)\n else:\n return s", "def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes:\n if isinstance(value, str):\n try:\n return value.encode(\"ascii\")\n except UnicodeEncodeError:\n raise TypeError(f\"{name} strings may not include unicode characters.\")\n elif isinstance(value, bytes):\n return value\n\n seen_type = type(value).__name__\n raise TypeError(f\"{name} must be bytes or str, but got {seen_type}.\")", "def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):\r\n if strings_only and isinstance(s, (types.NoneType, int)):\r\n return s\r\n # if isinstance(s, Promise):\r\n # return unicode(s).encode(encoding, errors)\r\n if not isinstance(s, basestring):\r\n try:\r\n return str(s)\r\n except UnicodeEncodeError:\r\n if isinstance(s, Exception):\r\n # An Exception subclass containing non-ASCII data that doesn't\r\n # know how to print itself properly. We shouldn't raise a\r\n # further exception.\r\n return ' '.join([smart_str(arg, encoding, strings_only,\r\n errors) for arg in s])\r\n return unicode(s).encode(encoding, errors)\r\n elif isinstance(s, unicode):\r\n return s.encode(encoding, errors)\r\n elif s and encoding != 'utf-8':\r\n return s.decode('utf-8', errors).encode(encoding, errors)\r\n else:\r\n return s", "def _valueString(value,verbose=0):\n\n t = type(value)\n vstr = t.__name__\n if issubclass(t, str):\n if len(value)>42:\n vstr = vstr + \", value = \"+ `value[:39]` + '...'\n else:\n vstr = vstr + \", value = \"+ `value`\n elif issubclass(t, _listTypes):\n return \"%s [%d entries]\" % (vstr, len(value))\n elif (PY3K and issubclass(t, io.IOBase)) or \\\n (not PY3K and issubclass(t, file)):\n vstr = vstr + \", \"+ `value`\n elif issubclass(t, _numericTypes):\n vstr = vstr + \", value = \"+ `value`\n elif _isinstancetype(value):\n cls = value.__class__\n if cls.__module__ == '__main__':\n vstr = 'instance of class ' + cls.__name__\n else:\n vstr = 'instance of class ' + cls.__module__ + '.' + cls.__name__\n elif issubclass(t, _functionTypes+_methodTypes):\n # try using Fredrik Lundh's describe on functions\n try:\n vstr = vstr + ' ' + describe.describe(value)\n try:\n if verbose and value.__doc__:\n vstr = vstr + \"\\n\" + value.__doc__\n except AttributeError:\n pass\n except (AttributeError, TypeError):\n # oh well, just have to live with type string alone\n pass\n elif issubclass(t, _numpyArrayType):\n vstr = vstr + \" \" + str(value.dtype) + \"[\"\n for k in range(len(value.shape)):\n if k:\n vstr = vstr + \",\" + `value.shape[k]`\n else:\n vstr = vstr + `value.shape[k]`\n vstr = vstr + \"]\"\n else:\n # default -- just return the type\n pass\n return vstr", "def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):\n if strings_only and isinstance(s, (types.NoneType, int)):\n return s\n if not isinstance(s, basestring):\n try:\n return str(s)\n except UnicodeEncodeError:\n if isinstance(s, Exception):\n # An Exception subclass containing non-ASCII data that doesn't\n # know how to print itself properly. 
We shouldn't raise a\n # further exception.\n return ' '.join([Core.smart_str(arg, encoding, strings_only, errors) for arg in s])\n return unicode(s).encode(encoding, errors)\n elif isinstance(s, unicode):\n return s.encode(encoding, errors)\n elif s and encoding != 'utf-8':\n return s.decode('utf-8', errors).encode(encoding, errors)\n else:\n return s", "def __bytes__(self):\n return bytes([self.type * 2, len(self.value)]) + bytes(self.value, 'utf-8')", "def __bytes__(self):\n return bytes([self.type * 2, len(self.value)]) + bytes(self.value, 'utf-8')", "def __bytes__(self):\n return bytes([self.type * 2, len(self.value)]) + bytes(self.value, 'utf-8')", "def to_string(data):\n if isinstance(data, bytes):\n return data.decode('utf-8')\n else:\n return data", "def __smart_str(self, s, encoding='utf-8', strings_only=False, errors='strict'):\n if strings_only and isinstance(s, (types.NoneType, int)):\n return s\n if not isinstance(s, basestring):\n try:\n return str(s)\n except UnicodeEncodeError:\n if isinstance(s, Exception):\n # An Exception subclass containing non-ASCII data that doesn't\n # know how to print itself properly. We shouldn't raise a\n # further exception.\n return ' '.join([self.__smart_str(arg, encoding, strings_only,\n errors) for arg in s])\n return unicode(s).encode(encoding, errors)\n elif isinstance(s, unicode):\n return s.encode(encoding, errors)\n elif s and encoding != 'utf-8':\n return s.decode('utf-8', errors).encode(encoding, errors)\n else:\n return s", "def native_(s, encoding='latin-1', errors='strict'):\n if isinstance(s, text_type):\n return s.encode(encoding, errors)\n return str(s)", "def get_string_value(self, obj, field):\n return smart_unicode(field.value_to_string(obj))", "def ensure_utf8_bytes(v: Union[str, bytes]) -> bytes:\n if isinstance(v, str):\n v = v.encode(\"utf-8\")\n return v", "def ustr(obj):\n if IS_PY2:\n # If we are getting a string, then do an explicit decode\n # else, just call the unicode method of the object\n if type(obj) in [str, basestring]: # pragma: no cover # noqa\n return unicode(obj, DEFAULT_ENCODING) # pragma: no cover # noqa\n else:\n return unicode(obj) # pragma: no cover # noqa\n else:\n if type(obj) in [bytes]:\n return obj.decode(DEFAULT_ENCODING)\n else:\n return str(obj)", "def test_bytes_to_native_str(self):\n b = bytes(b'abc')\n s = bytes_to_native_str(b)\n if PY2:\n self.assertEqual(s, b)\n else:\n self.assertEqual(s, 'abc')\n self.assertTrue(isinstance(s, native_str))\n self.assertEqual(type(s), native_str)", "def safestr(obj, encoding='utf-8'):\n if isinstance(obj, unicode):\n return obj.encode(encoding)\n elif isinstance(obj, str):\n return obj\n elif hasattr(obj, 'next') and hasattr(obj, '__iter__'): # iterator\n return itertools.imap(safestr, obj)\n else:\n return str(obj)", "def native_(s, encoding='latin-1', errors='strict'):\n if isinstance(s, text_type):\n return s\n return str(s, encoding, errors)", "def valueToString():", "def stringify(obj):\n tp = type(obj)\n if issubclass(tp, basestring):\n return obj\n elif hasattr(tp, '__unicode__'):\n s = tp.__unicode__(obj)\n if not isinstance(s, basestring):\n raise TypeError('__unicode__ did not return a string')\n return s\n elif hasattr(tp, '__str__'):\n s = tp.__str__(obj)\n if not isinstance(s, basestring):\n raise TypeError('__str__ did not return a string')\n return s\n else:\n return str(obj)", "def value_to_string(self, obj):\n value = self.value_from_object(obj)\n return value", "def serialize_str(self, obj):\n if len(obj) < 0x100:\n return 'U' + 
struct.pack('<B', len(obj)) + obj\n return 'T' + struct.pack('<I', len(obj)) + obj", "def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):\n if strings_only and isinstance(s, (types.NoneType, int)):\n return s\n if not isinstance(s, basestring):\n try:\n return str(s)\n except UnicodeEncodeError:\n if isinstance(s, Exception):\n # An Exception subclass containing non-ASCII data that doesn't\n # know how to print itself properly. We shouldn't raise a\n # further exception.\n return ' '.join([smart_str(arg, encoding, strings_only,\n errors) for arg in s])\n return unicode(s).encode(encoding, errors)\n elif isinstance(s, unicode):\n return s.encode(encoding, errors)\n elif s and encoding != 'utf-8':\n return s.decode('utf-8', errors).encode(encoding, errors)\n else:\n return s" ]
[ "0.8348208", "0.8329601", "0.8227834", "0.81440604", "0.7737775", "0.7444207", "0.74415994", "0.734178", "0.7317427", "0.7214035", "0.7206868", "0.7206296", "0.7203044", "0.72022784", "0.71490884", "0.705423", "0.7025212", "0.70000285", "0.69751537", "0.69738203", "0.6964417", "0.69249815", "0.6904178", "0.6903141", "0.6900444", "0.6872925", "0.68617445", "0.6847529", "0.6843383", "0.68405724", "0.6836679", "0.6833909", "0.6827976", "0.6820632", "0.6813064", "0.6813064", "0.6798753", "0.6798753", "0.67880845", "0.67878866", "0.6783079", "0.6762846", "0.67515844", "0.67251706", "0.67219996", "0.66965455", "0.66863817", "0.6664721", "0.66610676", "0.6658839", "0.66491187", "0.6646169", "0.663311", "0.663231", "0.66286755", "0.66254216", "0.6607964", "0.6600387", "0.6584503", "0.6553354", "0.65502423", "0.65475994", "0.6529814", "0.6505637", "0.6493872", "0.6476522", "0.64759594", "0.6474917", "0.6434676", "0.64328074", "0.64277834", "0.6407241", "0.64064544", "0.6400159", "0.6376857", "0.63758105", "0.63697827", "0.6364746", "0.6363611", "0.6362427", "0.6361207", "0.6356443", "0.63487405", "0.6346193", "0.6346193", "0.6346193", "0.63300174", "0.63255227", "0.6322348", "0.63125116", "0.6305169", "0.6281447", "0.6275635", "0.62414426", "0.6239993", "0.62291265", "0.6213644", "0.62035197", "0.6195714", "0.6192997" ]
0.80614305
4
Returns the file system path representation of a `PathLike` object.
def path_to_str(path): if hasattr(path, '__fspath__'): path = as_str_any(path.__fspath__()) return path
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def as_string(path: pathlib.Path) -> str:\n return path.as_posix()", "def path_serializer(obj: PurePath, **_: Any) -> str:\n return obj.as_posix()", "def as_pathlib(self):\n return Path(self.absolute)", "def _purepath_to_str(\n self, path: Union[Path, PurePath, str]\n ) -> Union[Path, PurePath, str]:\n if isinstance(path, PurePath):\n path = str(path)\n return path", "def posix_path(self, **kw):\n with_drive_letter = kw.get(\"with_drive\", True)\n return self._construct_path(\"/\", with_drive_letter)", "def __fspath__(self):\n return str(self)", "def get_path(self) -> Union[str, 'BytesIO']:\n return self._filepath", "def _path_to_string(path):\n return '.'.join(path)", "def normpath(path: Union[str, BasePathLike]) -> str:\n # convertion to string in order to allow receiving non string objects\n return os.path.relpath(str(path))", "def os_path(self, **kw):\n with_drive = kw.get(\"with_drive\", True)\n if os.name == \"nt\":\n return self.windows_path(with_drive=with_drive)\n return self.posix_path(with_drive=with_drive)", "def path(self):\n\t\treturn os.path.join(*self._string_values(limit=4))", "def convert_to_path(arg: Any) -> Path:\n return Path(arg)", "def saved_file_path_string(self):\n return self.saved_file_path.as_posix()", "def stringyfy(path):\n try:\n # Pathlib support\n path = path.__fspath__()\n except AttributeError:\n pass\n if hasattr(path, 'name'): # passed in a file\n path = path.name\n if isinstance(path, str):\n return path\n raise ValueError(f'Cannot convert {path} to a path')", "def path(self):\n\n if os.path.isabs(self._value):\n return pathlib.Path(self._value)\n raise RuntimeError('RequestString.path not supported.')", "def getpath(self, path):\n return self._join(path)", "def get_path(self, p_path):\n path_a = False\n path_a = path.abspath(p_path) # .abs\n path_p = {pattr: False for pattr in ['isDir', 'isFile', 'isLink', 'isMount',\n 'parent', 'item']}\n path_e = True if path.exists(path_a) else False # .exists\n if path_e:\n path_p['isDir'] = True if path.isdir(p_path) else path_p['isDir']\n path_p['isFile'] = True if path.isfile(p_path) else path_p['isFile']\n path_p['isLink'] = True if path.islink(p_path) else path_p['isLink']\n path_p['isMount'] = True if path.ismount(p_path) else path_p['isMount']\n path_a = path.normpath(path.normcase(path.realpath(path_a))) # .abs\n v_parts = path.split(path_a)\n path_p['parent'] = v_parts[0]\n path_p['item'] = v_parts[1]\n\n fpath = namedtuple('fpath', 'rqst exists abs isDir isFile isLink isMount parent item')\n return fpath(p_path, path_e, path_a, path_p['isDir'], path_p['isFile'], path_p['isLink'],\n path_p['isMount'], path_p['parent'], path_p['item'])", "def _path_to_str(var):\n if not isinstance(var, (Path, str)):\n raise ValueError(\"All path parameters must be either strings or \"\n \"pathlib.Path objects. 
Found type %s.\" % type(var))\n else:\n return str(var)", "def _get_as_path(self):\n return self.__as_path", "def build_path(path: Union[Path, str], path_is_absolute: bool = False) -> Path:\n if not path_is_absolute:\n return Path(os.getcwd()) / path\n if isinstance(path, str):\n return Path(path)\n return path", "def osnorm(self):\n import os\n if os.sep=='/' and \"\\\\\" in str(self):\n return Path(os.path.normpath(str(self).replace('\\\\','/' )))\n elif os.sep=='\\\\' and \"/\" in str(self):\n return Path(os.path.normpath(str(self).replace('/','\\\\' )))\n else:\n return self.norm()", "def path(sc, file_path):\n path_class = sc._gateway.jvm.org.apache.hadoop.fs.Path\n path_obj = path_class(file_path)\n return path_obj", "def realpath(path: str) -> str:\n pass", "def path(self):\n # type: () -> string_types\n return self._path", "def as_path(self, parent=None):\n if parent is not None: return Path(parent) / Path(self.as_str())\n return Path(self.as_str())", "def system_path(path):\n if is_windows(): return path.replace('/', '\\\\')\n else: return path.replace('\\\\', '/')", "def getPath(self):\n path = '/'.join(self.getPhysicalPath())\n return path", "def __fspath__(self):\n raise NotImplementedError", "def fpath(self):\n return os.path.join(self.path, self.name)", "def containing_path(path: Union[str, os.PathLike]) -> str:\n if not path:\n return str(path)\n url = urlparse(str(path))\n if url.scheme:\n if url.path:\n return os.path.dirname(path)\n return url.scheme + \"://\"\n return os.path.dirname(os.path.realpath(path))", "def path_str(path):\n\toutput = \"PATH: \"\n\tif path:\n\t\tfor i in path:\n\t\t\toutput += str(i.data) + \" -> \"\n\telse:\n\t\toutput += \"Empty\"\n\treturn output", "def getPath(obj):", "def get_path(self, path):\n return abspath(join(self.origin, *path))", "def abspath(path: str) -> str:\n pass", "def get_path(self, _property=None):\n return self._get_path(\"path\", _property)", "def format_path (in_path):\n return os.path.realpath(os.path.expanduser(in_path))", "def path(self, *args, **kwds):\n def makepath(args, mkdir=True):\n path = os.path.join(self.dir, *args)\n dirname = os.path.dirname(path)\n if mkdir and not os.path.isdir(dirname):\n os.makedirs(dirname)\n return path\n return makepath(args, **kwds)", "def path(self, toNative=True):\n return self.text(toNative=toNative)", "def get_path(self):\n try:\n return self._file.path\n except AttributeError:\n return os.path.abspath(self._file.name)", "def _get_filesystem_path(self, request):\n return self._get_path(filesystem_path(self.base_path, request, self.url_base), False)", "def path(self) -> str:\n return self._path", "def path(self) -> str:\n return self._path", "def path(self) -> str:\n return self._path", "def path(self) -> str:\n return self._path", "def system_path(self, path):\n return os.path.join(self.prefix, path.lstrip('/'))", "def make_fs_path(parts):\n return '/'.join(parts)", "def ospath(self, vPath):\n if not vPath.startswith('/'):\n raise OSError(vPath)\n parts = vPath.split('/')\n toppath = self._top_paths[parts[1]]\n return os.path.join(toppath, *parts[2:])", "def str_to_path(name):\n import os;\n return(os.path.abspath(name));", "def path(input_path, name=\"\", is_file=False, exists=False):\n string(input_path, \"%s path\" % name, False, None)\n input_path = os.path.abspath(input_path)\n\n if is_file:\n path_type = \"file\"\n else:\n path_type = \"directory\"\n if exists:\n if not os.path.exists(input_path):\n __ex(\"The given %s %s does not exist.\" % (name, path_type), False)\n if 
(is_file and not os.path.isfile(input_path)) or \\\n (not is_file and not os.path.isdir(input_path)):\n __ex(\"The given %s %s path is not a %s.\" % (name, path_type,\n path_type), False)\n else:\n if os.path.exists(input_path):\n __ex(\"The given %s %s path already exists.\" % (name, path_type),\n False)", "def get_path(self) -> Optional[str]:\n return self.path", "def path(self) -> Path:\n return self._path", "def __get_path(self):\n return self.path", "def realpath(self):\n return self.__class__(os.path.realpath(self.strpath))", "def get_real_path(path):\r\n real_path = lib_path.realpath(path)\r\n return real_path", "def get_fspath ( self, relpath=None ):\n if relpath:\n return self.root + os.sep + str ( relpath )\n else:\n return self.root", "def absolute_physical_path(self) -> str:\n return self._path", "def realpath(path, *, strict=False):\n if strict is not False:\n raise NotImplementedError('\"strict\" not supported on Python < 3.10')\n return _os.path.realpath(path)", "def systemPathToFileUrl(self, path):\n from unohelper import systemPathToFileUrl\n return systemPathToFileUrl(path)", "def makePath(path):\n\n compatPath = os.path.abspath(os.path.expanduser(path))\n\n return compatPath", "def path(self):\n\n if self.file_func:\n path = self.file_func(self.lookup_obj, **self.pattern_params)\n return FilePath(path=path)\n return FilePath(path=\"\")", "def path(self) -> str:\n return pulumi.get(self, \"path\")", "def path(self) -> str:\n return pulumi.get(self, \"path\")", "def file_path(self) -> global___Expression:", "def normpath (path):\n return os.path.normpath(path)", "def get_path(self):\n return self.path", "def path(self, prefix, args=()):\n assert len(args) == self.nargs\n prefix = os.path.abspath(prefix)\n name = self.name_(args)\n assert os.path.relpath(name) == name\n assert len(_os_path_split_all(name)) == self.depth\n return os.path.join(prefix, name)", "def nt_path_to_posix_path(path):\r\n path = path.replace(\"\\\\\", \"/\")\r\n parts = path.split(\":\")\r\n if len(parts) > 1:\r\n return \"/\" + parts[0].lower() + parts[1]\r\n return path", "def get_path(self):\n raise NotImplementedError(\"This asset does not support absolute paths\")", "def path(self):\n return self.lib.path", "def _get_os_path(self, name=None, path=''):\n\t\t\n\t\tif self.notebook_dir:\n\t\t\tout_path =os.path.join( self.notebook_dir, path.lstrip('/'))\n\t\telse:\n\t\t\tout_path = path\n\t\t\n\t\tif name:\n\t\t\tout_path = os.path.join(out_path, name.lstrip('/'))\n\t\t\n\t\treturn out_path", "def path_image(image):\n return bpy.path.abspath(image.filepath, library=image.library).replace(\"\\\\\", \"/\")\n # .replace(\"\\\\\",\"/\") to get only forward slashes as it's what POV prefers,\n # even on windows", "def get_path(self):\n return self.path", "def dirpath(self, *args, **kwargs):\n if not kwargs:\n path = object.__new__(self.__class__)\n path.strpath = dirname(self.strpath)\n if args:\n path = path.join(*args)\n return path\n return self.new(basename=\"\").join(*args, **kwargs)", "def full_path(self):\n return os.path.abspath(self.path)", "def __make_path(self, filename):\n return self.__path() + os.sep + filename", "def displaypath():\n\n import pathlib\n pth = pathlib.Path('./')\n pth.is_dir()\n pth.absolute()", "def realpath(self, path):\n return os.path.realpath(path)", "def get_path_filename(handle):\n path = config['path'].strip('/').strip()\n return path + '/' + handle + config['extension']", "def path(self):\n p = self\n\n name = [p.name()]\n offsets = set([p._offset])\n while 
p.has_parent_key():\n p = p.parent_key()\n if p._offset in offsets:\n name.append(\"[path cycle]\")\n break\n name.append(p.name())\n offsets.add(p._offset)\n return '\\\\'.join(reversed(name))", "def get_path(self):\n\n if not self.path:\n Settings.err_print(\"missing file path\")\n return \"\"\n return self.path", "def get_path(self):\n\n return self._path", "def __path(self):\n if self.parent:\n return self.parent.__path() + os.sep + self.__sanitize(self.name)\n return self.__sanitize(self.name)", "def _path_join(self, path):\n return os.path.join(self._path, path)", "def convertString(path):\n if (\"win\" in sys.platform):\n return path.replace(\"/\",\"\\\\\")\n elif (\"linux\" in sys.platform):\n return path.replace(\"\\\\\",\"/\")", "def path(self, f):\n\t\treturn os.path.join(self.directory, f)", "def _GeneratePathStr(path):\n return ((len(path) - 1) * ' ') + path[-1] if path else ''", "def path_to_string(path: Path) -> str:\n assert_continuous(path)\n\n pieces = [\"M {} {}\".format(path[0].p0[0], path[0].p0[1])]\n for curve in iter(path): # iter cast not strictly necessary\n piece = \"C {} {} {} {} {} {}\".format(\n int(round(curve.c0[0])), int(round(curve.c0[1])),\n int(round(curve.c1[0])), int(round(curve.c1[1])),\n int(round(curve.p1[0])), int(round(curve.p1[1]))\n )\n pieces.append(piece)\n\n return \" \".join(pieces)", "def path(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"path\")", "def data_path(path: str, createdir: bool = False) -> str:\n path_obj = Path(path)\n if not path_obj.is_absolute():\n if inside_project():\n path_obj = Path(project_data_dir(), path)\n else:\n path_obj = Path(\".scrapy\", path)\n if createdir and not path_obj.exists():\n path_obj.mkdir(parents=True)\n return str(path_obj)", "def path_filename_representation(path):\n # Strip leading / and replace / with .\n return re.sub(r\"^/(.*)$\", r\"\\1\", path).replace(\"/\", \".\")", "def get_write_path(*args) -> str:\n\n return str(Path(WRITE_PATH, *args))", "def abspath(self):\n if self.__abspath is None:\n self.__abspath = pbxpath.abspath(self)\n return self.__abspath", "def get_path(path):\n if _prefix and not '/' in path:\n path = _prefix + path\n\n if not _cwd:\n return path\n\n return join(_cwd, path)", "def GetNativePath(*args, **kwargs):\n return _gdi_.GraphicsPath_GetNativePath(*args, **kwargs)", "def get_proj_dir(path: Union[pathlib.PurePath, str] = __file__) -> str:\n return str(pathlib.Path(path).parent.absolute())", "def test_pathlib_obj(self):\n \"\"\"\n We do this because pygame functions internally use pg_EncodeString\n to decode the filenames passed to them. So if we test that here, we\n can safely assume that all those functions do not have any issues\n with pathlib objects\n \"\"\"\n encoded = encode_string(pathlib.PurePath(\"foo\"), \"utf-8\")\n self.assertEqual(encoded, b\"foo\")\n\n encoded = encode_string(pathlib.Path(\"baz\"))\n self.assertEqual(encoded, b\"baz\")", "def path(self):\n if self._path:\n return self._path\n path = os.environ[\"PATH\"].split(os.pathsep)\n path = [os.path.expanduser(x) for x in path]\n path = [os.path.abspath(x) for x in path]\n path = [x for x in path if os.path.exists(x)]\n self._path = path\n return self._path", "def path(self):\n return self.file_path()", "def getPath(self, date, sep = '/'):\n\n return sep.join( [self.getDirName(date), self.getFileName(date)] )", "def get_absolute_path(self):\n\t\treturn call_sdk_function('PrlFsEntry_GetAbsolutePath', self.handle)" ]
[ "0.6641513", "0.64170814", "0.6181322", "0.61145943", "0.5960764", "0.5935031", "0.5927816", "0.5881648", "0.5880042", "0.58208156", "0.5757177", "0.57282186", "0.5713228", "0.5694669", "0.56730723", "0.5647542", "0.56423336", "0.56211954", "0.56108016", "0.5586084", "0.5552537", "0.5552169", "0.55443764", "0.5454196", "0.54332507", "0.54233944", "0.5420171", "0.53725165", "0.53448623", "0.5344295", "0.5339562", "0.5320499", "0.5291719", "0.5288149", "0.5283706", "0.5283187", "0.5283107", "0.5270022", "0.52669215", "0.52627486", "0.5250655", "0.5250655", "0.5250655", "0.5250655", "0.52465457", "0.52454257", "0.5241026", "0.5214769", "0.5206399", "0.52057314", "0.5198978", "0.5189912", "0.5187487", "0.51871896", "0.51838696", "0.5171013", "0.5169659", "0.51675606", "0.5155213", "0.51512206", "0.5132318", "0.5132318", "0.512954", "0.512849", "0.512749", "0.51258194", "0.5117948", "0.5117245", "0.5109106", "0.509696", "0.50827616", "0.50755066", "0.5071647", "0.5064", "0.5062912", "0.5055764", "0.50541496", "0.5054133", "0.5043191", "0.50418234", "0.50303775", "0.50266725", "0.50247556", "0.5024134", "0.50207037", "0.50160503", "0.50144374", "0.5004033", "0.50027335", "0.49987254", "0.4988968", "0.49852818", "0.4983878", "0.4980965", "0.49803472", "0.49775568", "0.4971047", "0.49694914", "0.49682638", "0.49679628" ]
0.61987334
2
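
Usage sketch for the path_to_str document above. This is a minimal standalone re-implementation for illustration only; the original's as_str_any helper is assumed to behave like str() for this purpose.

import pathlib

def path_to_str(path):
    # os.PathLike objects (e.g. pathlib paths) expose __fspath__;
    # plain strings are returned unchanged.
    if hasattr(path, '__fspath__'):
        path = str(path.__fspath__())
    return path

assert path_to_str(pathlib.PurePosixPath('/tmp/data')) == '/tmp/data'
assert path_to_str('relative/name.txt') == 'relative/name.txt'
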
DO NOT EDIT Initialize a node
def __init__(self, value, next_node=None): self.value = value # element at the node self.next_node = next_node # reference to next node
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n self.node = None\n self.data = None", "def __init__(self):\n self.root = Node('')", "def __init__(self):\n self.root = Node(None)", "def __init__(self):\n self.start = Node('-1')", "def __init__(self):\n self.root = Node(\"\")", "def __init__(self):\n self.root = Node(\"\")", "def __init__(self):\n self.root = self.Node(None)", "def __init__(self, node: Dict):\n self._node = node", "def __init__(self, node_text=\"\", node_type=0, node_parent=None):\n self.node_text = node_text\n self.node_type = node_type\n self.node_parent = node_parent\n self.node_left = None\n self.node_right = None", "def __init__(self):\n self.root = SimpleNode()", "def __init__(self):\n self.head = PrefixNode('', False)", "def __init__(self):\n self.root = self.Node()", "def _init_node_attributes(self):\n assert False", "def __init__(self):\n self.__root = Node()", "def __init__(self):\n self.root = Node()", "def __init__(self):\n self.root = Node()", "def __init__(self):\n self.root = Node()", "def __init__(self, start_node):\n self.start_node = start_node", "def __init__(self, node):\n self.node = node\n self.parent = None\n self.depth = None", "def __init__(self, nodes=None):\r\n self.nodes = nodes", "def __init__(self):\n self.root = self.get_new_node();", "def __init__(self):\n\n self.nodes = {}", "def __init__(self, data, node):\n self.data = data\n self.node = node", "def __init__(self):\n\n\t\tself.root = None\n\t\tself.numNodes = 0", "def __init__(self, node: Node[T]) -> None:\n self.current = node", "def __init__(self, xml_node: ET.Element) -> None:\n\t\treturn", "def __init__(self, tree_node=None):\n self.root = tree_node", "def __init__(self, node: Dict):\n super().__init__(node)", "def __init__(self):\r\n super(AppendNode, self).__init__()", "def __init__(self, value, parent = None):\n # initialize new node\n self.value = value\n self.parent = parent\n self.left = None\n self.right = None\n self.height = 1", "def __init__(self, nodes):\n\n self._nodes = nodes", "def __init__(self, data, node):\n self.data = data\n self.node = node # This is the data structure which holds the data for this node, e.g. 
lat, lon, etc.", "def __init__(self):\n self.root = TridNode()", "def create_nodes(self):", "def __init__(self):\n # use a Trie as a data structure\n self.root = Node()", "def initialize(self, node: MComputeNode):\n raise Exception(\"Subclass responsibility\")", "def _create_node(\n self,\n name,\n ):\n pass", "def __init__(self):\n self.root = TreeNode(None)", "def testInit(self):\n\n self.assertEqual(\n [],\n self.node.desc\n )", "def test_init_empty_node():\n from dll import Node\n new_node = Node()\n assert new_node.value is None", "def __init__(self, node_def, op, message, error_code):\n ...", "def __init__(self):\n\n self.head = None\n self.node_count = 0", "def __init__(self):\n self.root = WordNode()", "def __init__(self):\n self.size = 0\n self.head = Node(0)", "def __init__(self) -> None:\n\t\t# Call super\n\t\tsuper(RootNode, self).__init__()\n\t\tself.nodes: List[Node] = []\n\t\tself.subfiles: Set[str] = set()", "def __init__(self):\n self.root = TreeNode(\"\")", "def __init__(self):\n self.root = TreeNode(\"\")", "def __init__(self):\n self.root = RadixTreeNode()\n self.root.key = \"\"\n self.size = 0", "def __init__(self, *args, **kwds):\n if len(args) > 1:\n raise TypeError('expected at most 1 arguments, got %d' % len(args))\n try:\n self.first_node\n except AttributeError: \n self.first_node = None\n self.last_node = None\n\tself.update(*args, **kwds)", "def __init__(self, node_factory):\n self._node_factory = node_factory\n self._size = 0\n self._root = self._new_node()", "def __init__(self):\n node = ListNode(0) # dummy\n self.head = node\n self.tail = node\n self.len = 0", "def __init__(self):\n self.root = TrieNode(None)", "def __init__(self):\n self.root = TrieNode(None)", "def __init__(self):\n self._idx = Node.index\n Node.index += 1", "def __init__(self, data=None):\n self.data = data\n # initializing an empty node that has no next nor prior node\n self.next = self.prior = None", "def __init__(self):\n self.root = [None, dict(), False] # val, sons, end-able", "def __init__(self):\n self.root = TreeNode('#')", "def __init__(self):\n Node.__init__(self)\n self.__counts = 0\n self.__children = dict()\n self.__children_counts = dict()", "def init(self) -> None:", "def _initialize_trees(self):", "def __init__(self,value):\n try:\n self.value=value\n self.next=None\n\n except Exception as error:\n print (f\"There is error in __init__ of Node, the error {error}\")", "def __init__(self):\r\n\r\n super(Node, self).__init__()\r\n self.inputs = []\r\n self.outputs = []\r\n self._active_outputs = []\r\n self.description = None\r\n\r\n # Experimental: dictionary to be used to retype output fields\r\n # Currently used only in CSV source node.\r\n self._retype_dictionary = {}", "def __init__(self, *args):\n this = _libsbml.new_XMLNode(*args)\n try: self.this.append(this)\n except: self.this = this", "def __init__(__self__, *,\n nodes: pulumi.Input[Sequence[pulumi.Input[str]]]):\n pulumi.set(__self__, \"nodes\", nodes)", "def __init__(self, node_id):\n # Assign ID and update class-counter\n self.id = node_id\n\n # Initialize\n self.is_sequence_end = False\n self.children = {}", "def __init__(self):\n self.number = None\n self.nodes = []\n self.type = None\n self.group = None\n self.material = None\n self.key = -1", "def __init__(self, init=None):\n # TODO enable passing of starting node semantics\n super().__init__(init)\n self._internal_trie = Trie(FactNode)\n # parser : PyParsing.ParserElement\n self._main_parser = None\n self._query_parser = None\n\n if init is not None:\n 
self.add(init)", "def __init__(self, data: str):\n self.root = Node(data)\n self.node_count = 1\n self.node_of_last_computed_hash = 0", "def __init__(self):\n self.root= TrieNode()", "def __init__(self, nodes):\n\t\t\n\t\tself.variables = dict([(n.name, n) for n in nodes])\n\t\tself.roots = [n for n in nodes if not n.parents]\n\t\tself.nodes = nodes", "def __init__(self, path: str) -> None:\n self.root = Node(\n None, path\n )\n self.depth = 0\n self.add_node(self.root)", "def prepare_node_attrs(self):", "def __init__(self):\n self.head = ListNode()", "def __init__(self, xml_node: ET.Element) -> None:\n\t\t# Call super\n\t\tsuper(__class__, self).__init__()\n\t\t# Set blank variables\n\t\tself.nodes: List[Node] = []\n\t\tself.attributes = []\n\t\tself.subfiles = set()\n\t\tself.nested = False\n\t\t# Set _element\n\t\tself._element = xml_node\n\t\t# Call _setup() to setup node, uses _element\n\t\tself._setup()", "def __init__(self):\n\t\tself.root = TrieNode('*')", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self):\n self.root = TrieNode()", "def __init__(self, container, node):\n self.container = container\n self.node = node", "def __init__(self):\n self.root = TrieNode(\".\")", "def __init__(self, root_value):\n self.root = self.TreeNode(value=root_value)", "def __init__(self, *args):\n _snap.TNEANetNodeI_swiginit(self, _snap.new_TNEANetNodeI(*args))", "def __init__(self, data, previous = None, next = None):\n\t\tNode.__init__(self, data, next)\n\t\tself.previous = previous", "def __init__(self):\n self.head = None\n self.tail = None\n self.current_node = None", "def __init__(self, **kwargs):\n\n tkwargs = self._first_init(**kwargs)\n\n # make tagged \"readonly\" and \"attr\" traits read_only, and set them using set_trait\n # NOTE: The set_trait is required because this sets the traits read_only at the *class* level;\n # on subsequent initializations, they will already be read_only.\n with self.hold_trait_notifications():\n for name, trait in self.traits().items():\n if settings[\"DEBUG\"]:\n trait.read_only = False\n elif trait.metadata.get(\"readonly\") or trait.metadata.get(\"attr\"):\n if name in tkwargs:\n self.set_trait(name, tkwargs.pop(name))\n trait.read_only = True\n\n # Call traitlets constructor\n super(Node, self).__init__(**tkwargs)\n\n self._traits_initialized_guard = True\n\n self.init()", "def initialize(self):\n\t\tpass", "def __init__(self):\n self.root = None\n self.k = None", "def __init__(self):\n self.root = None\n self.k = None", "def _initialize_node_attributes(dag):\n # Whether each node has been visited by the search yet.\n nx.set_node_attributes(dag, 'visited', False)\n # The title of the row in the mutations dataset corresponding to this node.\n nx.set_node_attributes(dag, 'dataset', None)\n # The function chosen at this (internal) node.\n nx.set_node_attributes(dag, 'function', None)\n # The mutual information of this node's dataset with the phenotype.\n 
nx.set_node_attributes(dag, 'value', None)\n # The number of genes in the subtree rooted at this node.\n nx.set_node_attributes(dag, 'genes', None)" ]
[ "0.76893985", "0.7661167", "0.7623163", "0.7614204", "0.7602438", "0.7602438", "0.7597835", "0.7509312", "0.7497298", "0.74624723", "0.7459934", "0.74241686", "0.74147093", "0.74051684", "0.7398285", "0.7398285", "0.7398285", "0.7396945", "0.7325279", "0.72634804", "0.7246107", "0.7224348", "0.7222058", "0.7194337", "0.7118192", "0.71116525", "0.71007645", "0.70893514", "0.7049821", "0.70462143", "0.70294034", "0.69864786", "0.69731927", "0.6965952", "0.69543403", "0.69366026", "0.6915466", "0.69147974", "0.68999237", "0.6864165", "0.68460643", "0.6833612", "0.6814226", "0.681176", "0.6795873", "0.67941046", "0.67941046", "0.67938215", "0.6788146", "0.67776746", "0.6772074", "0.675267", "0.675267", "0.67394626", "0.67386866", "0.67308646", "0.67228353", "0.67145884", "0.6709274", "0.6708699", "0.66912776", "0.6682741", "0.668178", "0.6676509", "0.667149", "0.6670158", "0.66566634", "0.66549003", "0.6640109", "0.6633435", "0.66101545", "0.6608015", "0.6603211", "0.65900683", "0.65893", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.6581532", "0.657651", "0.65693706", "0.6564844", "0.65596545", "0.6556814", "0.6537116", "0.6534039", "0.6532827", "0.6530694", "0.6530694", "0.65298873" ]
0.6563765
92
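
Usage sketch for the Node initializer document above, assuming the class exactly as shown; it builds a two-element singly linked list.

class Node:
    def __init__(self, value, next_node=None):
        self.value = value            # element at the node
        self.next_node = next_node    # reference to next node

head = Node(1, Node(2))               # list: 1 -> 2
assert head.value == 1
assert head.next_node.value == 2
assert head.next_node.next_node is None
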
DO NOT EDIT Determine if two nodes are equal (same value)
def __eq__(self, other): if other is None: return False if self.value == other.value: return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __eq__(self, other):\n return type(self) == type(other) and self.node is other.node", "def nodes_are_equal(node1, node2):\n\n try:\n return dump_ast(node1).strip() == dump_ast(node2).strip() and \\\n node1.lineno == node2.lineno and \\\n node1.col_offset == node2.col_offset\n except:\n return False", "def _node_equal(self, other):\n # We're not equal if other isn't a Node, or if other is a different class.\n if not isinstance(other, Node) or not isinstance(other, self.__class__):\n return False\n # Loop through all children, checking whether they are equal\n for self_child, other_child in zip(self.getChildren(), other.getChildren()):\n if not self_child == other_child:\n return False\n # If we get here, our two nodes much be equal\n return True", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n if not isinstance(other, Node):\n return False\n return self.data == other.data", "def equals(self, *args):\n return _libsbml.XMLNode_equals(self, *args)", "def __eq__(self, other_node):\n return self.state == other_node.state", "def is_equal(self, a, b):\n return a.X[0] == b.X[0]", "def __eq__(self, other):\n if not isinstance(other, Node):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, node):\n if node == None or self.element != node.element:\n return False\n return self.index == node.index", "def __eq__(self, another_node):\n return Node.state_as_string(self.state) == Node.state_as_string(another_node.state)", "def __eq__(self, node):\n return (self.entry == node.entry)", "def __eq__(self, other):\n # check equality of names since names are unique identifiers of nodes\n return self.name.__eq__(other.get_name())", "def __eq__(self, other):\n # check equality of names since names are unique identifiers of nodes\n return self.name.__eq__(other.get_name())", "def __eq__(self, other) -> bool:\n if not isinstance(other, self.__class__):\n return False\n\n if self.number_of_nodes() != other.number_of_nodes():\n return False\n if self.number_of_edges() != other.number_of_edges():\n return False\n\n if list(self.nodes) != list(other.nodes):\n return False\n\n # Compare node data.\n for i in self.nodes:\n # We may want to exclude the 'name' attribute from comparisons, assuming\n # it has no logical meaning.\n if self.nodes[i] != other.nodes[i]:\n return False\n\n if list(self.edges) != list(other.edges):\n return False\n\n for i, j in self.edges:\n # Compare edge data.\n if self.edges[i, j] != other.edges[i, j]:\n return False\n\n return True", "def __eq__(self, node):\n if node == None or self.element != node.element:\n return False\n return self.left == node.left and self.right == node.right", "def identical_to(self, elem):\n\n return (self.n1 == elem.n1) and (self.n2 == elem.n2)", "def isSameTree(self, node1, node2):\n # print(\"isSameTree call for {} and {}\".format(node1.id, node2.id))\n\n if node1.id == node2.id:\n return True\n if node1.value == node2.value:\n # Compare children, in sorted order based on value\n node1Children = list(\n 
sorted(\n node1.neighbors,\n key=lambda node:\n node.value))\n node2Children = list(\n sorted(\n node2.neighbors,\n key=lambda node:\n node.value))\n\n if len(node1Children) == len(node2Children):\n # For identical trees, A list of nieghbors\n # in sorted (based on value) order:\n # Should have same length\n # At each position, values are same (verify recursively)\n for i in range(len(node1Children)):\n if not self.isSameTree(node1Children[i], node2Children[i]):\n return False\n # All neighbor pairs verified\n return True", "def __eq__(self, other):\n\n return (self.nodes[0].id == other.nodes[0].id) & \\\n (self.nodes[1].id == other.nodes[1].id) & \\\n (self.name == other.name)", "def __eq__(self, other):\n # check equality of the nodesets\n return self.nodeset.__eq__(other.get_nodeset())", "def test_equals(self):\n parameters = [\n (1, 'a', False),\n (1, None, False),\n (1, 2, False),\n (1, 1, True)\n ]\n for pair in parameters:\n with self.subTest(pair=pair):\n self.getLogger().info('Next pair %s', pair)\n _obj1 = Node(pair[0])\n _obj2 = None if not pair[1] else Node(pair[1])\n self.assertEqual(_obj1._equals(_obj2), pair[2])\n _objSelf = Node(1)\n self.assertTrue(_objSelf._equals(_objSelf))", "def testEquality(self):\n pass", "def __eq__(self, other):\n if not isinstance(other, Node):\n return NotImplemented\n return self.state == other.state", "def test_deep_equals(obja, objb, isequal):\n\n objatree = wo.typedtree(obja)\n objbtree = wo.typedtree(objb)\n match = objatree == objbtree\n ok = match == isequal\n\n if ok:\n s = \"pass\"\n else:\n s = \"fail\"\n\n print(f\"{obja} == {objb} is {match} : {s}\")\n return ok", "def is_identical(self, tree1, tree2):\r\n if not tree1 and not tree2:\r\n return True\r\n elif tree1 and tree2:\r\n return (tree1.root == tree2.root and self.is_identical(tree1.left,tree2.left) and self.is_identical(tree1.right, tree2.right))\r\n else:\r\n return False", "def node_match(n1, n2):\r\n return n1['name'] == n2['name'] and n1['modes'] == n2['modes']", "def assertNodesEqual(self, a, b):\n self.assertEqual((a.version, a.address, a.service, a.properties),\n (b.version, b.address, b.service, b.properties))", "def compare_nodes(n1, n2):\n return n1['g_val'] + n1['h_val'] < n2['g_val'] + n2['h_val']", "def _is_node_identical(self, job_name_a, job_name_b):\n\n node_a = self._graph_a.get_node(job_name_a)\n node_b = self._graph_b.get_node(job_name_b)\n\n # Check for same job type name and version\n if node_a.job_type_name != node_b.job_type_name or node_a.job_type_version != node_b.job_type_version:\n return False\n\n # Check that A and B have matching parents that are identical to one another\n a_parent_names = set(a_parent.node_name for a_parent in node_a.parents)\n for b_parent in node_b.parents:\n b_parent_name = b_parent.node_name\n if b_parent_name not in self._identical_nodes:\n return False # B has a parent that is not identical to any other node\n matched_a_parent_name = self._identical_nodes[b_parent_name]\n if matched_a_parent_name not in a_parent_names:\n return False # B has a parent that does not match a parent of A\n a_parent_names.remove(matched_a_parent_name)\n if a_parent_names:\n return False # A has a parent that does not match a parent of B\n\n # Check that A and B use the same inputs\n a_inputs = dict(node_a.inputs)\n for b_input_name in node_b.inputs:\n if b_input_name not in a_inputs:\n return False # B input not defined for A\n b_input = node_b.inputs[b_input_name]\n a_input = a_inputs[b_input_name]\n if not a_input.is_equal_to(b_input, 
self._matched_recipe_inputs, self._identical_nodes):\n return False # A and B have a non-matching input\n del a_inputs[b_input_name]\n if a_inputs:\n return False # A input not defined for B\n\n return True", "def __eq__(self, other):\n if not isinstance(other, NodeProperties):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n if self is other:\n return True\n elif type(self) != type(other):\n return False\n else:\n # A node is considered equal if it has the exact same state as\n # another node\n if self.board_state == other.board_state:\n return True\n else:\n return False", "def equivalent(kls, first, second):\n if first.empty() and second.empty():\n return True\n elif first.vertices.shape[0] != second.vertices.shape[0]:\n return False\n elif first.edges.shape[0] != second.edges.shape[0]:\n return False\n\n EPSILON = 1e-7\n\n vertex1, ct1 = np.unique(first.vertices, axis=0, return_counts=True)\n vertex2, ct2 = np.unique(second.vertices, axis=0, return_counts=True)\n \n vertex_match = np.all(np.abs(vertex1 - vertex2) < EPSILON)\n ct_match = np.all(ct1 == ct2)\n if not (vertex_match and ct_match):\n return False\n\n g1 = nx.Graph()\n g1.add_edges_from(first.edges)\n g2 = nx.Graph()\n g2.add_edges_from(second.edges)\n edges_match = nx.is_isomorphic(g1, g2)\n del g1 \n del g2\n\n if not edges_match:\n return False\n\n second_verts = {}\n for i, vert in enumerate(second.vertices):\n second_verts[tuple(vert)] = i\n \n attrs = [ attr['id'] for attr in first.extra_attributes ]\n for attr in attrs:\n buf1 = getattr(first, attr)\n buf2 = getattr(second, attr)\n if len(buf1) != len(buf2):\n return False\n\n for i in range(len(buf1)):\n i2 = second_verts[tuple(first.vertices[i])]\n if buf1[i] != buf2[i2]:\n return False\n\n return True", "def __eq__(self, other):\n if not isinstance(other, StateSyncNode):\n return False\n\n return self.__dict__ == other.__dict__", "def values_eq(self, a, b):\r\n return a == b", "def isEquivalent(self, oth: 'StateNode') -> bool:\n a = [self.table[i][j] for i in self.state[0] for j in self.state[1]]\n b = [oth.table[i][j] for i in oth.state[0] for j in oth.state[1]]\n if len(a) != len(b):\n return False\n if len(a) < 1 or len(b) < 1 or len(a[0]) != len(b[0]):\n return False\n for i in range(len(a)):\n for j in range(len(a[0])):\n if a[i][j] != b[i][j]:\n return False\n return True", "def __eq__(self, other: 'PriorityNode') -> bool:\n return self.priority == other.priority and self.value == other.value", "def are_the_same(node_before, node_after) -> bool:\n\n if node_before.algorithm != node_after.algorithm:\n return False\n elif not _is_output_name_same(node_before, node_after):\n return False\n else:\n for attr in interested_attrs:\n if _exists_attr(attr, node_before, node_after) == 1 or \\\n _exists_attr(attr, node_before, node_after) == 2:\n return False\n elif _exists_attr(attr, node_before, node_after) == 12 and \\\n node_before.attributes[attr] != node_after.attributes[attr]:\n return False\n return True", "def test_eq():\n # Test for equality special method with scalar Rnode object and float value\n x = Rnode(2.0)\n try:\n assert (x == 2.0) == True\n assert (x == 1.0) == False\n except AssertionError as e:\n print(e)\n raise AssertionError\n\n # Test for equality special method with two scalar Rnode object\n x = Rnode(2.0)\n y = Rnode(2.0)\n z = Rnode(1.0)\n try:\n assert (x == y) == True\n assert (x == z) == False\n except AssertionError as e:\n print(e)\n raise AssertionError", "def same(self, x, y):\n return self.find(x) 
== self.find(y)", "def __eq__(self, other):\n self = filter_tree(self, _remove_visit_meta)\n return super(Node, self).__eq__(filter_tree(other, _remove_visit_meta))", "def __eq__(self, other):\n # check equality of names and attributes as well as that of the incident Node objects\n return \\\n self.weight == other.get_weight() and \\\n self.attributes.__eq__(other.get_attributes()) and \\\n self.get_incident_nodes().__eq__(other.get_incident_nodes())", "def is_equal(self, a, b):\n return a == b", "def __eq__(self, other):\n return self.left == other.left and self.right == other.right and self.left2 == other.left2 and self.right2 == other.right2", "def compareNodes(x, y):\n return x.pathValue - y.pathValue", "def identical_to(self, elem):\n\n return (self.n1 == elem.n1) and (self.n2 == elem.n2) and (self.n3 == elem.n3) and (self.n4 == elem.n4)", "def __eq__(self, other):\n return self.element() == other.element()", "def test_equivalency(self):\n def compare_func(obj, node):\n # same id\n self.assertEqual(obj.id, node.get(\"id\"))\n\n # same html\n self.assertEqual(obj.html.prettify, node.prettify)\n\n # parents have same id (only for non-root elements)\n if not obj == self.document.root:\n self.assertEqual(obj.parent.id, node.parent.get(\"id\"))\n\n # same number of children\n child_nodes = self.get_children_of_node(node)\n self.assertEqual(len(obj.children), len(child_nodes))\n\n # children have same ids\n for (child_obj, child_node) in zip(obj.children, child_nodes):\n self.assertEqual(child_obj.id, child_node.get(\"id\"))\n\n self.recursively_compare_tree_against_html(compare_func)", "def __eq__(self, other: 'Tree') ->bool:\n return (type(self) is type(other) and\n self.value == other.value and\n self.children == other.children)", "def assertNodesEqual(self, first, second):\n def get_attrs(l):\n result = []\n for n in l:\n result.append((n.service, n.address, n.version, n.properties))\n return result\n self.assertEqual(get_attrs(first), get_attrs(second))", "def _match_identical_nodes(self):\n\n for job_name_b in self._topo_b_nodes:\n for job_name_a in self._unresolved_a_nodes:\n if self._is_node_identical(job_name_a, job_name_b):\n self._identical_nodes[job_name_b] = job_name_a\n self._unresolved_a_nodes.remove(job_name_a)\n self._unresolved_b_nodes.remove(job_name_b)\n break", "def compare(self, node) -> bool:\n\t\t# No conflicts, Return True\n\t\treturn True", "def test_equal_on_equal(self):\n a = objects.OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)\n b = objects.OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)\n self.assertTrue(a == b)\n self.assertTrue(b == a)", "def is_equal(self, a, b):\n return a is b", "def __eq__(self, other):\n if not isinstance(other, NodeStatus):\n return False\n\n return self.__dict__ == other.__dict__", "def _exact_compare(tree1, tree2):\n attrs = ['name', 'length', 'support']\n for n1, n2 in zip(tree1.postorder(), tree2.postorder()):\n for attr in attrs:\n if getattr(n1, attr, None) != getattr(n2, attr, None):\n return False\n return True", "def __eq__(self, other):\n return other.left == self.left and other.right == self.right", "def __eq__(self, other):\n return self.value == other or self.value == other.value", "def __eq__(self,other):\n boul0 = self.linked[0].coordinates[0]==other.linked[0].coordinates[0] and self.linked[0].coordinates[1]==other.linked[0].coordinates[1]\n boul1 = self.linked[1].coordinates[0]==other.linked[1].coordinates[0] and self.linked[1].coordinates[1]==other.linked[1].coordinates[1]\n boulid = self.id==other.id\n 
return boul0 and boul1 and boulid", "def inside_itself(self):\n for i in range(2, len(self.nodes)):\n if self.nodes[0] == self.nodes[i]:\n return True\n return False", "def __eq__(self, other: RBtree) -> bool:\n comp = lambda n1, n2: n1 == n2 and ((comp(n1.left, n2.left) and comp(n1.right, n2.right)) if (n1 and n2) else True)\n return comp(self.root, other.root) and self.size == other.size", "def __eq__(self, other):\n if isinstance(other, type(self)):\n same_edges = self._edges == other._edges\n same_weights = self._weights == other._weights\n return same_edges and same_weights\n else:\n return False", "def __eq__(self, other):\n return self.value == other.value", "def are_equal(self, sp1, sp2):\n return True", "def __eq__(self, other):\n if other is None or not isinstance(other, IDLNode):\n return 1\n return self.__dict__.__eq__(other.__dict__)", "def __eq__(self, other):\n if not type(other) == type(self):\n return False\n sedges, oedges = self.edges, other.edges\n return ((len(sedges) == len(oedges)) and\n all(numpy.all(se == oe) for (se, oe) in zip(sedges, oedges)))", "def __eq__(self, other):\n return (self.vertices == other.vertices and self.weight == other.weight)", "def _is_equal(x, y):\n return x[0] == y", "def verifyNodeEq(self,path: str,value):\n actureValue = getNodeValue(self.data, path)\n assert str(actureValue) == value, \"assert jsonNodeEq fail. \" + str(self.data) + \" did not equal \" + value + \" with path: \" + path", "def is_connected_same_node(graph, node_u, node_v):\n for out_neighbor_u in graph.get_deductive_out_neighbors(node_u):\n for out_neighbor_v in graph.get_deductive_out_neighbors(node_v):\n if out_neighbor_u == out_neighbor_v:\n return True\n return False", "def compare_ll(node1: LinkedListNode, node2: LinkedListNode) -> bool:\n while node1 and node2:\n if node1.data == node2.data:\n node1 = node1.next\n node2 = node2.next\n else:\n return False\n return not node1 and not node2", "def are_equal(value1, value2):\n if value1 == None or value2 == None:\n return True\n if value1 == None or value2 == None:\n return False\n return value1 == value2", "def are_equal(self, sp1, sp2):\n return", "def __eq__(self, other) -> bool:\n if other is None or not isinstance(other, Graph):\n name = other.name if other else None\n print(f'{name} is not a Graph object.')\n return False\n\n def match(op1: Operator, op2: Operator) -> bool:\n if not op1.equals(op2):\n print(f'{op1.name} is different.')\n return False\n\n # check input nodes and further\n for i1, i2 in zip(op1.input_ops.values(), op2.input_ops.values()):\n if not match(i1, i2):\n return False\n return True\n\n for o1, o2 in zip(self.get_outputs(), other.get_outputs()):\n if not match(o1, o2):\n return False\n return True", "def __eq__(self, second):\r\n\t\treturn self.x == other.x and self.y == other.y", "def __eq__(self, other):\r\n if other is not None:\r\n return self.value() == other.value()\r\n else:\r\n return False", "def is_same(self: _R, other: _R) -> bool:\n children = [i.render() for i in self.children]\n other_children = [i.render() for i in other.children]\n return other_children == children", "def __eq__(self, other):\r\n\t\treturn (self.type == other.type and self.value == other.value)", "def __eq__(self, other):\r\n\r\n return type(self) == type(other) and self.ttype == other.ttype", "def is_equivalent(self, other):\n A = self.minimization().relabeled()\n [initial] = A.initial_states()\n address = {initial: ()}\n for v in A.digraph().breadth_first_search(initial.label()):\n state = A.state(v)\n 
state_address = address[state]\n for t in A.iter_transitions(state):\n if t.to_state not in address:\n address[t.to_state] = state_address + tuple(t.word_in)\n\n B = other.minimization().relabeled()\n labels = {B.process(path)[1].label(): state.label()\n for (state, path) in address.iteritems()}\n try:\n return A == B.relabeled(labels=labels)\n except KeyError:\n return False", "def equals(x, y):\n return x == y", "def __eq__(self, other):\n if self.edges != other.edges:\n return False\n\n if self.name != other.name:\n return False\n\n return True", "def eq(self, other: Any) -> bool:\n # TODO: Rasswanth: Fix later after the comparison operation\n # relative\n # from .... import Tensor\n\n # if (\n # isinstance(self.child, Tensor)\n # and isinstance(other.child, Tensor)\n # and (self.child != other.child).child.any() # type: ignore\n # ):\n # return False\n\n # if (\n # isinstance(self.child, np.ndarray)\n # and isinstance(other.child, np.ndarray)\n # and (self.child != other.child).any()\n # ):\n # return False\n\n # if self.rank != other.rank:\n # return False\n\n # if self.ring_size != other.ring_size:\n # return False\n\n # if self.nr_parties != other.nr_parties:\n # return False\n\n # return True\n\n # ATTENTION: Why are we getting here now when we never did before?\n if not hasattr(other, \"child\"):\n return self.child == other\n\n return self.child == other.child", "def __eq__(self, other):\n return self.x == other.x and self.y == other.y", "def __eq__(self, other):\n return ZeroaryOperator.__eq__(self, other) and \\\n self.relation_key == other.relation_key", "def __eq__(self, other: Vertex) -> bool:\n if isinstance(other, self.__class__):\n return self.id == other.id and self.edges == other.edges\n return False", "def __eq__(self, other):\n if not isinstance(other, ActivityLogTreeNode):\n return False\n\n return self.__dict__ == other.__dict__", "def is_equal(self, state1, state2):\n return self._replace_unks(state1) == self._replace_unks(state2)", "def __eq__(self, other: LinkedList) -> bool:\n curr1 = self._first\n curr2 = other._first\n are_equal = True\n\n while are_equal and curr1 is not None and curr2 is not None:\n if curr1.item != curr2.item:\n are_equal = False\n curr1 = curr1.next\n curr2 = curr2.next\n\n return are_equal", "def test_graphid_operator_eq_and_neq():\n\n for xstr, ystr in itertools.product([\"g1\", \"g2\", \"y7\", \"z123\"], repeat=2):\n x = _ir.GraphId(xstr)\n y = _ir.GraphId(ystr)\n\n if xstr == ystr:\n assert x == y\n assert not (x != y)\n else:\n assert not (x == y)\n assert x != y", "def isEqual (self, other) :\n return self.id == other.getIdent ()", "def __eq__(self, other):\n if not isinstance(other, NodeInterfaceAlias):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__( self, other ):\n return self.data == other.data", "def __eq__(self, other):\n parent_same = self.parent1.rid == other.parent1.rid \\\n and self.parent2.rid == other.parent2.rid\n\n parents_opposite = self.parent2.rid == other.parent1.rid \\\n and self.parent1.rid == other.parent2.rid\n\n return parent_same or parents_opposite", "def are_equal(self, sp1, sp2):\n return sp1 == sp2", "def __eq__(self, other):\n return self.x == other.x and self.y == other.y", "def equivalent(self, other):\n return id(self) == id(other)" ]
[ "0.76085", "0.7590751", "0.7546388", "0.7519825", "0.7519825", "0.7519825", "0.7519825", "0.7519825", "0.74666744", "0.73625326", "0.73597276", "0.73304427", "0.7314779", "0.7209392", "0.7208096", "0.71472955", "0.71115476", "0.710639", "0.710639", "0.7105356", "0.7098044", "0.70584035", "0.70541406", "0.7047405", "0.7021934", "0.70133746", "0.6972418", "0.6945322", "0.6945135", "0.6945009", "0.692617", "0.68966454", "0.6881806", "0.68804985", "0.68648064", "0.68620884", "0.68348444", "0.68090904", "0.6804932", "0.6801198", "0.6800909", "0.679557", "0.67890203", "0.67835903", "0.67822784", "0.6758166", "0.67508715", "0.67423385", "0.6718681", "0.67065626", "0.6682769", "0.66526353", "0.66521513", "0.66483307", "0.6645658", "0.6637631", "0.66312796", "0.6627903", "0.66174793", "0.6604587", "0.66037506", "0.6593291", "0.6587754", "0.6587156", "0.65719885", "0.65650344", "0.6560074", "0.65455186", "0.6531127", "0.6526297", "0.6524241", "0.6519815", "0.65188843", "0.65132225", "0.65040886", "0.64874315", "0.64844954", "0.64776057", "0.64697754", "0.6460894", "0.64527124", "0.64499754", "0.6439282", "0.64390635", "0.64283365", "0.64182895", "0.64169914", "0.64151603", "0.6411573", "0.6410359", "0.6408289", "0.64063764", "0.6405573", "0.64011216", "0.639253", "0.6387822", "0.6385997", "0.6381028", "0.6377737", "0.63686675", "0.63661206" ]
0.0
-1
DO NOT EDIT String representation of a node
def __repr__(self):
    return str(self.value)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_node_str():\n a_left = Node(7, data='pl left')\n a_right = Node(42, data='pl right')\n a = Node(13, data='pl a', left=a_left, right=a_right)\n string_a = str(a)\n expect_string = '13'\n assert string_a == expect_string", "def test_node_str():\n node_a = Node({'name':['list','of','vertex']})\n assert str(node_a) == \"{'name':['list','of','vertex']}\"", "def test_node_to_str(self):\n f = lws.node_to_str\n # normal\n assert f(('a', 'b')) == 'a: b'\n # exception\n assert f(('a',),) == \"('a',): \"\n assert f('a') == 'a: '", "def __str__(self) -> str:\n\t\treturn get_node_description(self.name)", "def nodeToString(cls, node):\n return lxml.etree.tostring(node, method='html').decode()", "def visit_Struct(self, node):\n return str_node(node)", "def as_str(node):\n node_string = ' '.join(k for k, _ in node.leaves())\n return u' '.join(node_string.split())", "def __str__(self) -> str:\n return 'Node({})'.format(self.yaml_node)", "def __repr__(self):\n return 'Node({!r})'.format(self.data)", "def rc_node(node):\n if node[-1] == \"'\": return node[:-1]\n else: return node + \"'\"", "def __repr__(self):\n\n return f\"<Node {self.data}>\"", "def __str__(self):\n\n pg_str = pformat(self.content)\n repr_str = \"Node ID: {} \\nNode Name: {} \\n{}\".format(self.id, self.name, pg_str)\n\n return repr_str", "def nodeToLongName(node):\n\n pass", "def __str__(self):\n string = \"\"\n cur_node = self.head\n while cur_node is not None:\n string += cur_node.data.__str__()\n cur_node = cur_node.next\n return string", "def __str__(self):\n return self.str_parse_tree(0)", "def __str__(self):\n return self.root_node", "def visit_Node(self, node):\n pass", "def serialize(node, tree=\"\"):\n \n \n if (not node): #Base case\n tree += \"# \"\n return tree\n tree += (str(node.val) + \" \")\n tree = serialize(node.left, tree)\n tree = serialize(node.right, tree)\n\n return tree", "def serialize(node):\r\n serial = node.val \r\n\r\n if node.left or node.right:\r\n serial += r'('\r\n\r\n if node.left:\r\n serial += serialize(node.left)\r\n \r\n serial += r'|' \r\n \r\n if node.right:\r\n serial += serialize(node.right)\r\n \r\n serial += r')'\r\n\r\n return serial", "def __str__(self) -> str:\n return 'UnknownNode({})'.format(self.yaml_node)", "def node_repr(node):\n\n result = History.name(node)\n if History.children(node):\n result += ':[' + ', '.join(map(History.node_repr, History.children(node))) + ']'\n return result", "def test_node__repr():\n a_left = Node(7, data='pl left')\n a_right = Node(42, data='pl right')\n a = Node(13, data='pl a', left=a_left, right=a_right)\n repr_a = repr(a)\n expect_repr = '<Node | Val: 13 | Data: pl a | Left: 7 | Right: 42>'\n assert expect_repr == repr_a", "def __str__(self):\n return self._tree.toString()", "def _(self, node: String):\n stripped_str = repr(node.string.replace('\\0',''))\n if stripped_str not in self.val_map:\n self.val_map.append(stripped_str)\n\n idx = self.val_map.index(stripped_str)\n return f\"Val{idx}\"", "def serialize(self, node, appstruct: ID) -> str:\n return super().serialize(\n node,\n str(appstruct)\n )", "def __str__(self):\n return '<Node%d> TC: %d BP: %d DN: %s\\n' % (self.id,\n self.travelCount,\n self.botPresent,\n repr(self.dirNodes))", "def test_repr(self):\n\n node = Node(\n {\n 'healthchecks': [],\n 'host': 'iwa-ait.org',\n 'port': 22,\n 'user': 'adm-technical',\n 'password': 'oh-some-secret'\n },\n {},\n mock.Mock()\n )\n\n self.assertIn('iwa-ait.org', str(node))\n self.assertIn('22', str(node))\n self.assertIn('adm-technical', 
str(node))\n self.assertNotIn('oh-some-secret', str(node))", "def __str__(self):\n # string representation includes values of all inner fields\n return \\\n \"Node Name: \" + str(self.name) + \"\\n\" + \\\n \"Node Attributes: \" + str(self.attributes) + \"\\n\" + \\\n \"Incident Edges: \" + \"\\n\".join([edge.__str__() for edge in self.incident_edges]) + \"\\n\"", "def __str__(self):\n # string representation includes values of all inner fields\n return \\\n \"Node Name: \" + str(self.name) + \"\\n\" + \\\n \"Node Attributes: \" + str(self.attributes) + \"\\n\" + \\\n \"Incident Edges: \" + \"\\n\".join([edge.__str__() for edge in self.incident_edges]) + \"\\n\"", "def nameToNode(name):\n\n pass", "def get_node_string(self, type_number):\n if type_number in self.node_backward_dict:\n return self.node_backward_dict[type_number]\n\n print(\"ERROR!! node type number is wrong\", type_number)\n quit()", "def __str__(self):\n name_str = \"node name is %s\\n\" % self.__name\n label_str = \"labels are %s\\n\" % str(self.__labels)\n propety_str = \"properties are %s\\n\" % str(self.__props)\n return name_str + label_str + propety_str", "def get_node_text(self):\n return self.node_text", "def to_node(value: str) -> Node:\n if not value:\n res = Empty()\n elif value in CONCATENATION_SYMBOLS:\n res = Concatenation()\n elif value in UNION_SYMBOLS:\n res = Union()\n elif value in KLEENE_STAR_SYMBOLS:\n res = KleeneStar()\n elif value in EPSILON_SYMBOLS:\n res = Epsilon()\n elif value[0] == \"\\\\\":\n res = Symbol(value[1:])\n else:\n res = Symbol(value)\n return res", "def visit_Typedef(self, node):\n return str_node(node)", "def test_node_repr_return():\n input = 42\n expected = f'<Node | Val: {input} | Next: None>'\n actual = repr(Node(input))\n assert expected == actual", "def __repr__(self):\n return 'BinaryNode({})'.format(repr(self.data))", "def visit(self, node):", "def visit(self, node):", "def __repr__(self):\n return '\\n~Node (' + str(self._val) + ') has ' + str(len(self._children)) + ' children: ' + str(sorted([val for val in self._children])) + '~'", "def visit_text(self, node):\n self.printer.text(node.xml_value)", "def __init__(self, node_text=\"\", node_type=0, node_parent=None):\n self.node_text = node_text\n self.node_type = node_type\n self.node_parent = node_parent\n self.node_left = None\n self.node_right = None", "def node_to_string(self, node, tab_count=0, is_add_children=False):\n tabs_str = ''\n for i in range(0, tab_count):\n tabs_str += '\\t'\n\n node_str = tabs_str + str(node.value) + ': ' + str(node.count)\n\n children_str = ''\n if is_add_children:\n for child_node in node.children:\n children_str += '\\n\\t' + tabs_str + self.node_to_string(child_node, tab_count+1, True)\n\n return node_str + children_str", "def pretty_node(node_record):\n node_str = \"\"\n label = node_record.get(\"label\", None)\n properties = node_record.get(\"properties\", None)\n\n if label is None or properties is None:\n return str(node_record)\n else:\n # Add the label\n node_str += label + Utils._SEPARATOR\n\n # If this is an artifact, add the path\n if label == \"Artifact\":\n path = properties.get(\"path\", Utils._DEFAULT_FILENAME)\n\n if path is not None:\n # Shorten excessively long paths\n if len(path) >= Utils._MAX_PATH_LEN:\n substr_len = Utils._MAX_PATH_LEN // 2\n subpath = path[0:substr_len] + \"...\"\n subpath += path[len(path)-substr_len:]\n path = subpath\n node_str += path\n\n # If this is a process, add information about it\n elif label == \"Process\":\n pid = 
properties.get(\"pid\", None)\n uid = properties.get(\"uid\", None)\n user = properties.get(\"user\", None)\n proc_name = properties.get(\"pidname\", None)\n node_str += \"[uid\" + Utils._SEPARATOR + str(uid)\n node_str += Utils._SUB_SEPARATOR\n node_str += \"pid\" + Utils._SEPARATOR + str(pid)\n if user is not None:\n node_str += Utils._SUB_SEPARATOR + \"user\"\n node_str += Utils._SEPARATOR + user\n if proc_name is not None:\n node_str += Utils._SUB_SEPARATOR + \"pidname\"\n node_str += Utils._SEPARATOR + proc_name\n node_str += \"]\"\n\n return node_str", "def __str__(self):\n s = \"--\\n\"\n for node in self:\n s += node.__str__() + \"\\n\"\n return s + \"--\"", "def __repr__(self):\r\n\r\n if self.content is not None:\r\n return \"%s:\\\"%s\\\"\" % (self.tag_name, self.content)\r\n else:\r\n return \"%s:%s\" % (self.tag_name, str(self.children))", "def nodeToShortName(node):\n\n pass", "def __str__(self):\n return \"NODE: \" + str(self.num_childs) + \" \" + str(self.num_metadata)", "def print_nodes(self, s):\n\t\treturn ' '.join((s.replace(\"\\n\",\" \")).split())", "def test_node_str_return():\n input_a = [7, 13, 42]\n input_b = 42\n expected_a = str(input_a)\n expected_b = str(input_b)\n actual_a = str(Node(input_a))\n actual_b = str(Node(input_b))\n assert expected_a == actual_a\n assert expected_b == actual_b", "def test_str():\n # Test for string special method with scalar Rnode objects\n x = Rnode(1.0)\n try:\n assert str(x) == 'Reverse-mode Rnode Object ( Values: 1.0 )'\n except AssertionError as e:\n print(e)\n raise AssertionError", "def test_graph_str():\n node_list = []\n node_list.append(Node({'A':['B','C']}))\n node_list.append(Node({'B':['C','D']}))\n node_list.append(Node({'C':['D']}))\n g = Graph(node_list)\n assert str(g) == \"[{'A':['B','C']},{'B':['C','D']},{'C':['D']}]\"", "def get_node_tree_print_string(node: Node) -> str:\n node_io = io.StringIO()\n pre_order_travel(node, PrintVisitor(\n node_io,\n show_trigger=True,\n show_event=True,\n show_limit=True,\n show_meter=True,\n show_repeat=True,\n show_parameter=True,\n ))\n node_text = node_io.getvalue()\n return node_text", "def __repr__(self):\n return str(self.nodes)", "def node_formatter(path_node):\n # type: (PathParam) -> str\n if path_node.type:\n node_type = TYPE_MAP.get(path_node.type, 'str')\n return \"<{}:{}>\".format(path_node.name, node_type)\n return \"<{}>\".format(path_node.name)", "def __str__(self):\n temp = self.__head\n ss = []\n while temp is not None:\n ss.append(str(temp.data))\n temp = temp.next_node\n return ('\\n'.join(ss))", "def tree(self) -> Node:\n return Node(self.to_string())", "def get_text(node) -> str:\n result = ''\n if node.text:\n result = node.text\n for elem in node:\n result += GLGenerator.get_text(elem)\n if node.tail:\n result += node.tail\n return result", "def __str__(self):\n return self.__id__() + \" || \" + str(self.__node_a.name) + \" -> \" + str(self.__node_b.name)", "def convertXMLNodeToString(*args):\n return _libsbml.XMLNode_convertXMLNodeToString(*args)", "def __repr__(self):\n return f'PrefixTreeNode({self.character!r})'", "def __repr__(self):\n return 'TreeNode({0})'.format(self.data)", "def make_node_text(self):\n fmtstr = ub.codeblock(\n '''\n process {name}\n :: {type}\n ''')\n parts = [fmtstr.format(name=self.name, type=self.type)]\n if self.config:\n if isinstance(self.config, six.string_types):\n parts.extend(self.config.splitlines())\n else:\n for key, val in self.config.items():\n parts.append(' :{key} {val}'.format(key=key, val=val))\n text = 
'\\n'.join(parts)\n return text", "def __str__(self):\n s = \"\"\n current = self.__head\n while current:\n s += str(current.data) + \"\\n\"\n current = current.next_node\n return s[:-1]", "def XMLNode_convertXMLNodeToString(*args):\n return _libsbml.XMLNode_convertXMLNodeToString(*args)", "def __str__(self):\r\n T = Btree(2)\r\n T.root = Node(self.keys, [Node(child.keys, []) for child in self.children])\r\n return str(T)", "def __str__(self):\n temp = \"head\"\n temp_node = self.head\n while temp_node is not None:\n temp += f' -> {temp_node.val}'\n temp_node = temp_node.next\n temp += f'-> None'\n return temp", "def dump_node(self, node: Node) -> None:\n\n if not node:\n return\n\n nodeStr = f\"\"\"{self.get_unique_vertex_name(node)}[\\n\n \\tlabel = \\\"{self.dump_label(node)}\\\"\\n\n \\tshape = \\\"record\\\"\\n\n \\tstyle=\\\"filled,rounded\\\"\\n\n \\tfillcolor={self.get_color(node)}\\n\n penwidth = 2];\\n\"\"\"\n self.vertices_.append(nodeStr)", "def __str__(self):\n values = \"\"\n node = self.head\n while node:\n values = values + \"{} \".format(node.__str__())\n node = node.next\n return values", "def __str__(self):\n reprStr = ''\n currNode = self.head\n while currNode:\n reprStr = reprStr + str(currNode.count) + ' ' + str(currNode.data) + '\\n'\n currNode = currNode.next\n return reprStr", "def __str__(self):\n string = \"\"\n cur = self.__head\n while cur is not None:\n string += str(cur.data)\n cur = cur.next_node\n if cur is not None:\n string += \"\\n\"\n return string", "def __str__(self):\n _str = \"\"\n current_node = self._head\n while(current_node != None):\n _str += str(current_node.value)\n _str += \" -> \"\n current_node = current_node.next\n _str += \"None\"\n return _str", "def make_label(self, node):\n\t\tcurstring = str(node.__class__)[13:-2]\n\t\tif isinstance(node, ast.Name):\n\t\t\tcurstring = node.id\n\t\telif isinstance(node, ast.Num):\n\t\t\tcurstring = str(node.n)\n\t\telif isinstance(node, ast.Str):\n\t\t\tcurstring = node.s\n\n\t\tif isinstance(node, ast.Load) or isinstance(node, ast.Store) or \\\n\t\t\tisinstance(node, ast.Param) or isinstance(node, ast.Add) or \\\n\t\t\tisinstance(node, ast.Sub) or isinstance(node, ast.Mult):\n\t\t\treturn None\n\n\t\ttry:\n\t\t\tself.labels[str(node)] = curstring\n\t\t\treturn str(node)\n\t\texcept AttributeError:\n\t\t\treturn None", "def __repr__(self):\n return \"{}: {}\".format(self.nodeid, self.lemma)", "def __repr__(self):\r\n node_rep = \"{} RBTreeNode(value = {}\".format(self.color, self.value)\r\n node_rep += \", left=RBTreeNode({})\".format(self.left.value) if self.left else \", left=NONE\"\r\n node_rep += \", right=RBTreeNode({})\".format(self.right.value) if self.right else \", right=NONE\"\r\n node_rep += \", parent=RBTreeNode({}))\".format(self.parent.value) if self.parent else \", parent=None)\"\r\n return node_rep", "def val(node: md.Document) -> str:\n try:\n node.normalize()\n return node.firstChild.wholeText.strip() # Handles CDATASection too\n except AttributeError:\n return \"\"", "def __str__(self):\n\t\tself._synchronize_attributes()\n\t\ts = \"\"\n\t\tqueue = c3.Queue()\n\t\tlevel = 0\n\t\tqueue.enqueue((1, self._root))\n\t\twhile queue.peek():\n\t\t\tnodelev, node = queue.dequeue()._data\n\t\t\tif (not node):\n\n\t\t\t\t#NODE IS NOT THERE - just a placeholder\n\t\t\t\t#print spacing and enqueue fake left and right children\n\t\t\t\t#but stops if they would be past the max depth of the tree\n\t\t\t\tif ((self._depth - nodelev + 1) <= 0):\n\t\t\t\t\tcontinue\n\n\t\t\t\tif (nodelev != 
level):\n\t\t\t\t\ts += \"\\n\"\n\t\t\t\t\t#PRINT THE INDENT\n\t\t\t\t\tindent = \" \"*int((self._max_chars)*(2**(self._depth - nodelev) - 1))\n\t\t\t\t\ts += indent\n\t\t\t\t\tlevel = nodelev\n\n\t\t\t\t#PRINT THE SPACING\n\t\t\t\ts += \" \"*(self._max_chars)*(2**(self._depth - nodelev + 1) - 1)\n\n\t\t\t\t#PRINT SPACES TO REPLACE DATA\n\t\t\t\ts += \" \"*self._max_chars\n\n\t\t\t\t#Enqueue fake children\n\t\t\t\tqueue.enqueue((nodelev + 1, None))\n\t\t\t\tqueue.enqueue((nodelev + 1, None))\n\t\t\t\tcontinue\n\n\t\t\tif (nodelev != level):\n\t\t\t\ts += \"\\n\"\n\t\t\t\t#PRINT THE INDENT\n\t\t\t\tindent = \" \"*(self._max_chars)*(2**(self._depth - nodelev) - 1)\n\t\t\t\ts += indent\n\t\t\t\tlevel = nodelev\n\n\t\t\t#adds preceding \"|\"s if the str length of the data is smaller than the max\n\t\t\tfor i in range(int(self._max_chars - len(str(node.value())))):\n\t\t\t\ts += \"|\"\n\t\t\ts += str(node.value()) \n\n\t\t\t#PRINT THE SPACING\n\t\t\tspacing = \" \"*(self._max_chars)*(2**(self._depth - nodelev + 1) - 1)\n\t\t\ts += spacing\n\n\t\t\t#Enqueues\n\t\t\tif node.lchild():\n\t\t\t\tqueue.enqueue((nodelev + 1, node.lchild()))\n\t\t\telse:\n\t\t\t\t#ENQUEUES A FAKE NODE IN ORDER TO FORMAT THE TREE FOR MISSING NODES\n\t\t\t\tqueue.enqueue((nodelev + 1, None))\n\t\t\tif node.rchild():\n\t\t\t\tqueue.enqueue((nodelev + 1, node.rchild()))\n\t\t\telse:\n\t\t\t\t#ENQUEUES A FAKE NODE IN ORDER TO FORMAT THE TREE FOR MISSING NODES\n\t\t\t\tqueue.enqueue((nodelev + 1, None))\n\t\ts += \"\\n\"\n\t\treturn s", "def node_label(self):\n if (self.body in operators):\n return self.body.__name__\n else:\n return str(self.body)", "def add_node(self, node):", "def formatNode(node):\n return {'fullName': node.fullName, 'start': node.start, 'finish': node.finish, 'own': node.own, 'tag': node.tag}", "def leaf_str(self, value, depth, available):\n r = repr(value)\n if r[0]==\"'\":\n r = '\"' + r[1:-1].replace('\"', '\\\\\"').replace(\"\\\\'\", \"'\") + '\"'\n return r, False", "def serialize(self, root):\n if root == None:\n return \"\"\n \n data = []\n\n def traversal(root):\n if root == None:\n data.append('#')\n return \n \n data.append(str(root.val))\n traversal(root.left)\n traversal(root.right)\n return\n \n traversal(root)\n return ' '.join(data)", "def add_node_from_string(self, node):\n assert(node is not None)\n LOG.info(\"Try to add node=%s\" % node)\n\n try:\n net = NetNode(node)\n self.info.nodeAdd(net.ident)\n # update net-params (enabled + up)\n self.info.netNodeUpdate(net.nid, net.net_params())\n LOG.debug(\"Successfully added node: %s\", str(net))\n\n except TOPOLOGY.NodeAlreadyExists, exe:\n LOG.error(\"NodeAlreadyExists exception: %s\", str(exe))\n except TOPOLOGY.InternalProblems, exe:\n LOG.error(\"InternalProblems exception: %s\", str(exe))\n except TOPOLOGY.InvocationNotAllowed, exe:\n LOG.error(\"InvocationNotAllowed exception: %s\", str(exe))\n except Exception, exe:\n LOG.error(\"Generic exception: %s\", str(exe))", "def __repr__(self):\r\n return \"ListNode({})\".format(self.data)", "def str_recursive(node):\n\n if node == None:\n return \"\"\n else:\n return str(node.item) + \" \" + LinkedList.str_recursive(node.next)", "def bytes_to_referent(string):\n d = pickle.loads(string)\n return RedBlackNode(\n RedBlackNodeRef(address=d['left']),\n d['key'],\n ValueRef(address=d['value']),\n RedBlackNodeRef(address=d['right']),\n d['color']\n )", "def content(node: etree.Element) -> str:\n return node.text if node.text else \"\"", "def name(node):\n\n return fst(node)", "def new_node(self, 
offset):\n # First we get the name of the node\n nameidx = self.string[offset:].find(b'\\0')\n name = self.string[offset: offset + nameidx]\n string_offset = offset + calc_length_word_align(nameidx + 1)\n node = FDTNode(name)\n return string_offset, node", "def shallowText(node):\n return \"\".join(shallowTextGenerator(node))", "def __unicode__(self):\n for n in self.children:\n if isinstance(n, StringType):\n return n\n return u\"\"", "def __init__(self, fields = None, chars = None):\r\n super(StringStripNode, self).__init__()\r\n\r\n self.fields = fields\r\n self.chars = chars", "def get_node_name(self):\n return util.join_names_underscore(self.name, str(self.as_pointer()))", "def __str__(self):\n if self.next is not None:\n return (str(self.value) + ' -->')\n return str(self.value)", "def convertStringToXMLNode(*args):\n return _libsbml.XMLNode_convertStringToXMLNode(*args)", "def __str__(self):\n stringRepresentation = []\n for node in self.getNodes():\n stringRepresentation.append(\"->\".join(\n (str(node), str(self.graph[node]))))\n\n return str(stringRepresentation)", "def get_node(self, key: str) -> Node:", "def dump_label(self, node: Node) -> str:\n\n labelStr = f\"\"\"{{ {{<Inputs>Inputs}}|\n {{ {node.get_kind_name()}\\lname: {node.get_name()} }}|\n {{<Outputs>Outputs}} }}\"\"\"\n return labelStr", "def __repr__ (self) -> String:\n\n st = (\"_Token(%r, '%s', %s, %r)\"\n % (self.start, self.text, self.kind, self.value))\n return st", "def __str__(self):\n next = self.next.data if self.next else \"None\"\n return \"%s->%s\" % (self.data, next)", "def parse_literal(node):\n # From client to database\n if isinstance(node, ast.StringValue):\n if isinstance(node.value, str):\n return node.value.upper()\n return node.value" ]
[ "0.71952814", "0.69486153", "0.6932254", "0.686408", "0.68391114", "0.683295", "0.68214226", "0.68039536", "0.6790645", "0.6687023", "0.6634845", "0.6466171", "0.6461758", "0.64257735", "0.6393528", "0.6344041", "0.63340676", "0.63146573", "0.63079375", "0.6293312", "0.6272459", "0.62190014", "0.621492", "0.6211648", "0.62115884", "0.6207662", "0.61908215", "0.6168645", "0.6168645", "0.6152029", "0.61505824", "0.61460304", "0.6134366", "0.61316085", "0.6124039", "0.6114577", "0.61125994", "0.6106334", "0.6106334", "0.6104009", "0.6094681", "0.60861033", "0.60849786", "0.6082626", "0.6078638", "0.6075552", "0.60681486", "0.60600644", "0.60515445", "0.6048182", "0.60269344", "0.60226464", "0.60213804", "0.6011538", "0.60097504", "0.6006166", "0.5973406", "0.59636396", "0.59586614", "0.5950352", "0.594535", "0.59303916", "0.5922049", "0.5911024", "0.5900512", "0.58907133", "0.5885448", "0.58724385", "0.5871824", "0.5853765", "0.5851712", "0.5834232", "0.5821119", "0.5820565", "0.5815804", "0.58114976", "0.58005613", "0.5787817", "0.57872653", "0.57842654", "0.5783355", "0.5772696", "0.57701576", "0.57664", "0.5764065", "0.5750389", "0.5748081", "0.5747662", "0.57438946", "0.5743779", "0.5738674", "0.5728633", "0.5725655", "0.5724388", "0.5721977", "0.5721569", "0.5720063", "0.57143515", "0.5710123", "0.5707972", "0.570268" ]
0.0
-1
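The one-line __repr__ in the document field above presupposes a node object exposing a value attribute. A minimal sketch of such a Node class — assuming a next_node link and value-based equality, with names inferred from the surrounding snippets rather than fixed by this record — is:

class Node:
    def __init__(self, value, next_node=None):
        self.value = value          # payload stored in the node
        self.next_node = next_node  # reference to the following node, or None

    def __eq__(self, other):
        # Value-based comparison; the list-level __eq__ further down relies on
        # node equality behaving this way (an assumption, not stated in the record).
        return isinstance(other, Node) and self.value == other.value

    def __repr__(self):
        return str(self.value)  # matches the document field above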
DO NOT EDIT Create/initialize an empty linked list
def __init__(self, data=None):
    self.head = None  # Node
    self.tail = None  # Node
    self.size = 0  # Integer
    if data:
        [self.push_back(i) for i in data]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n node = ListNode(0) # dummy\n self.head = node\n self.tail = node\n self.len = 0", "def __init__(self, lst=[]):\n self.__length = 0 # current length of the linked list\n self.__head = None # pointer to the first node in the list\n for e in lst: # initialize the list,\n self.add(e) # by adding elements one by one", "def new_empty_ll():\n from linked_list import Linked_List\n this_empty_ll = Linked_List()\n return this_empty_ll", "def __init__(self):\n\t\tself.head = None\n\t\tself.tail = None", "def __init__(self):\n self.size = 0\n self.head, self.tail = Node(0), Node(0)\n self.head.next = self.tail\n self.tail.prev = self.head", "def __init__(self):\n self._head = self._Node(None, None, None)\n self._tail = self._Node(None, None, None)\n self._head._next = self._tail\n self._tail._prev = self._head\n self._size = 0", "def __init__(self):\n self.head = None\n self.tail = None\n self.current_node = None", "def __init__(self):\n\t\tself._head = None\n\t\tself._tail = None\n\t\tself._size = 0", "def __init__(self):\n self.head = None\n self.tail = None", "def __init__(self):\n self.head = None\n self.tail = None", "def __init__(self):\n\n self.head = None\n self.tail = None\n self.size = 0", "def __init__(self):\n self.head = None\n self.tail = None\n self.size = 0", "def __init__(self):\n self.head = None\n self.tail = self.head", "def __init__(self):\n self._head = None\n self._tail = None\n self._size = 0", "def __init__(self):\n self._head = None\n self._tail = None\n self._size = 0", "def __init__(self):\n self._head = None\n self._tail = None\n self._size = 0", "def __init__(self):\n\n self.head = None\n self.node_count = 0", "def __init__(self, head: ListNode):\n self.nodes = []\n\n while(head):\n self.nodes.append(head)\n head = head.next", "def __init__(self, head: ListNode):\n self.l = []\n while head:\n self.l.append(head.val)\n head = head.next", "def __init__(self):\r\n self._head = None\r\n self._tail = None\r\n self._size = 0", "def __init__(self):\n\t\tself.current = None\n\t\tself.head = None", "def __init__(self):\n self.head = None\n self.length = 0", "def __init__(self):\n self.head = None\n self.length = 0", "def __init__(self):\n self.head = None\n self.size = 0", "def __init__(self):\n self.head = None\n self.size = 0", "def __init__(self):\n self.head = None\n self.size = 0", "def __init__(self, items):\r\n if len(items) == 0: # No items, and an empty list!\r\n self._first = None\r\n else:\r\n self._first = _Node(items[0])\r\n curr = self._first\r\n for item in items[1:]:\r\n curr.next = _Node(item)\r\n curr = curr.next", "def __init__(self, data=None):\n if data is not None:\n self._size = 1\n self.head = Node(data)\n self.tail = self.head\n else:\n self._size = 0\n self.head = None\n self.tail = None", "def __init__(self):\n try:\n self.head=None\n\n except Exception as error:\n print (f\"There is error in __init__ of LinkedList, the error {error}\")", "def __init__(self):\n \n self.array = [LinkedListNode(None, None) for i in range(10000)]", "def __init__(self):\n self.head = None\n self.tail = None\n self.count = 0", "def __init__(self):\n self._head = self._tail = None\n self._size = 0", "def __init__(self, head: ListNode):\n self.head = head\n self.list = []\n while head:\n self.list.append(head.val)\n head = head.next", "def __init__(self, data=None):\n self.head = None\n self.tail = None\n if data is not None:\n try:\n for item in data:\n if item is data[0]:\n self.head = Node(item, next=None)\n self.tail = self.head\n else:\n self.head 
= Node(item, self.head)\n except TypeError:\n node = Node(data, next=None)\n self.head = node\n self.tail = self.head", "def __init__(self):\n self.size = 0\n self.head = Node(0)", "def __init__(self):\n self.head = ListNode()", "def __init__(self, lst=[]):\r\n self.__length = 0 # current length of the linked list\r\n self.__head = None # pointer to the first node in the list\r\n self.__last = None # pointer to the last node in the list\r\n lst.reverse() # reverse to ensure elements will appear in same order\r\n for e in lst: # add elements of input list lst one by one\r\n self.add(e)", "def __init__(self) -> None: \n SortedList.__init__(self)\n self.head = None", "def create_empty_node():\n from linked_list import Node\n return Node()", "def __init__(self):\n\n self.head = None", "def __init__(self):\n\n self.head = None", "def __init__(self):\n\n self.head = None", "def __init__(self):\n self.dummy = ListNode(-1)\n self.cnt = 0", "def __init__(self):\n self.length = 0\n self.head = None", "def __init__(self, items):\n if len(items) == 0:\n self._first = None\n self._rest = None\n else:\n self._first = items[0]\n self._rest = LinkedListRec(items[1:])", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self):\n self.head = None", "def __init__(self, strict=None):\n if strict is None:\n strict = False\n Node.strict = strict\n\n super(LinkedList, self).__init__()\n self.first_node = None", "def __init__(self):\n\n self.__head = None", "def __init__(self, items=None):\n self.head = None # First node\n self.tail = None # Last node\n # Append given items\n if items is not None:\n for item in items:\n self.append(item)", "def __init__(self):\r\n self.head = None", "def __init__(self):\n self.__head = None", "def __init__(self):\n self.__head = None", "def __init__(self, linked_list: object):\n self.current_node = linked_list._head", "def add_first(self, data):\n # define the head as the new Node\n self.head = Node(data, next=self.head)\n # if list was empty define th tail as the head\n if self.tail is None:\n self.tail = self.head\n # set the skip back pointer if needed\n if self.head.next is not None:\n if self.head.next.next is not None:\n self.head.next.next.skip_back = self.head", "def __init__(self, head):\n self.head = head\n self.length = 0\n node = head\n while node:\n node = node.next\n self.length += 1", "def __init__(self, data=None):\n self.head = None\n self.tail = None\n if data is not None:\n for value in data:\n self.append(value)", "def __init__(self, data=None):\n self.head = None \n if data is not None:\n for value in data:\n self.append(value)", "def __init__(self, iterable=None):\n # Initialize a new linked list to store the items\n # print(\"self __init__\", self)\n self.list = LinkedList()\n # self.top = self.list.head\n if iterable is not None:\n for item in iterable:\n self.push(item)", "def __init__(self):\n\n self.head = linkNode()\n self.tail = None\n # print(self.head.val)", "def __init__(self, data=None):\n self.data = data\n # initializing an empty node that has no next nor prior node\n self.next = 
self.prior = None", "def __init__(self, size=1):\n self._head = None\n self._tail = None\n self._size = 0", "def __init__(self, init_size=8):\n # Create a new list (used as fixed-size array) of empty linked lists\n self.buckets = [LinkedList() for _ in range(init_size)]", "def __init__(self, head: ListNode):\n self.head = head\n temp = head\n i = 0\n while temp is not None:\n i+=1\n temp = temp.next\n self.len = i # 找到list的长度", "def __init__(self):\n # Initializing an empty list.\n self.mylist = []", "def __init__(self, head: ListNode):\n self.head = head", "def __init__(self, head: ListNode):\n self.head = head", "def __init__(self, head: ListNode):\n self.head = head", "def __init__(self, head: ListNode):\n self.head = head", "def __init__(self, head: ListNode):\n self.head = head", "def __init__(self):\n self.min_stack = []\n self.listHead = LNode(0, 0)", "def __init__(self, item):\r\n self.item = item\r\n self.next = None # Initially pointing to nothing\r", "def __init__(self, l):\n self.l = l\n self.next = None\n self.prev = None\n self.prev_n = -1\n self.next_n = -1", "def __init__(self):\n self.head = Block()\n self.tail = Block()\n self.head.next = self.tail\n self.tail.prev = self.head\n self.mapping = {}", "def __init__(self):\n self.head = None", "def __init__(self, iterable=None):\n # Initialize a new linked list to store the items\n self.list = LinkedList()\n if iterable is not None:\n for item in iterable:\n self.push(item)", "def __init__(self, init):\n self.stepforward = int(init)\n self.data = Linkedlist()", "def __init__(self):\n self.head = PrefixNode('', False)", "def create_linked_list(input_list):\n\t\ttry:\n\t\t\thead = Node(input_list.pop(0)) #remove the first list item and return as its head\n\n\t\t\twhile (len(input_list)>0):\n\t\t\t\tcurrent_node = head\n\t\t\t\twhile current_node.next:\n\t\t\t\t\tcurrent_node = current_node.next\n\t\t\t\tcurrent_node.next = Node(input_list.pop(0))\n\n\t\texcept IndexError:\n\t\t\t\thead = None\n\t\treturn head", "def __init__(self, capacity):\n self.capacity = capacity #this is example for list implementation\n self.head = [None] * capacity #this is example for list implementation\n self.num_items = 0 #this is example for list implementation", "def __init__(self):\n self.l = []", "def simple_ll():\n ll = LinkedList()\n ll.push(20)\n ll.push(4)\n ll.push(15)\n ll.push(85)\n return ll", "def __init__(self, iterable=None):\n self.list = LinkedList()\n\n if iterable:\n for item in iterable:\n self.push(item)", "def create_linked_list(input_list):\n head=None\n for value in input_list:\n if head is None:\n head=Node(value)\n else:\n current_node=head\n while current_node.next:\n current_node=current_node.next\n current_node.next=Node(value)\n# printlist(head)\n# print('------')\n return head", "def __init__(self, head=None):\n\n self.head = head", "def constructList(vals):\n # Current method is iterative, recursive soln also exists\n head = ListNode(val=vals.pop(0))\n current = head\n while len(vals) > 0:\n nex = ListNode(val=vals.pop(0))\n current.next = nex; current = nex\n return head", "def __init__(self):\n self._head = None # reference to the head node\n self._size = 0 # number of stack elements", "def test_lined_list_create_with_non_iterable():\n from linked_list import Linked_List\n new_linked_list = Linked_List(-100)\n assert new_linked_list.head.value == -100", "def __init__(self, value = None):\n self.head = value\n self.size = 0" ]
[ "0.77725506", "0.77538705", "0.7744785", "0.76412094", "0.7635089", "0.7626129", "0.76229495", "0.7537509", "0.75358", "0.75358", "0.7468231", "0.7464318", "0.74601215", "0.7418059", "0.7418059", "0.7418059", "0.74112684", "0.7407795", "0.73824275", "0.73780024", "0.73205614", "0.7314495", "0.7314495", "0.729529", "0.729529", "0.729529", "0.72869486", "0.72686356", "0.72680944", "0.7243389", "0.7230653", "0.7224334", "0.72200924", "0.7202811", "0.7197179", "0.7176942", "0.71444845", "0.7126491", "0.712564", "0.7118043", "0.7118043", "0.7118043", "0.7117511", "0.71133816", "0.708222", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70769", "0.70503426", "0.7041793", "0.70017546", "0.6987931", "0.6983869", "0.6983869", "0.69734114", "0.6963289", "0.6938694", "0.6895471", "0.68838865", "0.6880816", "0.6869327", "0.68591726", "0.68521774", "0.6820981", "0.68129444", "0.6808314", "0.6801886", "0.6801886", "0.6801886", "0.6801886", "0.6801886", "0.6793744", "0.6793503", "0.67797893", "0.67760074", "0.67476237", "0.67382854", "0.6725563", "0.6718788", "0.67182547", "0.67139024", "0.6690542", "0.667039", "0.66595477", "0.66382706", "0.66173846", "0.6600102", "0.6584344", "0.65787876", "0.6571386" ]
0.6659353
94
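The constructor above calls push_back, which this record does not define. A hedged sketch of what such a method could look like — assuming the Node class sketched earlier and tail-append semantics — is:

def push_back(self, value):
    # Hypothetical helper assumed by the constructor: append value at the tail.
    new_node = Node(value)
    if self.tail is None:  # empty list: the new node is both head and tail
        self.head = new_node
    else:
        self.tail.next_node = new_node
    self.tail = new_node
    self.size += 1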
DO NOT EDIT Defines "==" (equality) for two linked lists
def __eq__(self, other):
    if self.size != other.size:
        return False
    if self.head != other.head or self.tail != other.tail:
        return False
    # Traverse through linked list and make sure all nodes are equal
    temp_self = self.head
    temp_other = other.head
    while temp_self is not None:
        if temp_self == temp_other:
            temp_self = temp_self.next_node
            temp_other = temp_other.next_node
        else:
            return False
    # Make sure other is not longer than self
    if temp_self is None and temp_other is None:
        return True
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __eq__(self, other: LinkedList) -> bool:\n curr1 = self._first\n curr2 = other._first\n are_equal = True\n\n while are_equal and curr1 is not None and curr2 is not None:\n if curr1.item != curr2.item:\n are_equal = False\n curr1 = curr1.next\n curr2 = curr2.next\n\n return are_equal", "def compare_ll(node1: LinkedListNode, node2: LinkedListNode) -> bool:\n while node1 and node2:\n if node1.data == node2.data:\n node1 = node1.next\n node2 = node2.next\n else:\n return False\n return not node1 and not node2", "def __eq__(self, other):\n\t\treturn (self.srcList == other.srcList) and (self.tgtList == other.tgtList)", "def compare_list(first, second):\n temp1 = first\n temp2 = second\n while temp1 and temp2:\n if temp1.data == temp2.data:\n temp1 = temp1.next\n temp2 = temp2.next\n else:\n return False\n if not temp1 and not temp2:\n return True\n return False", "def __eq__(self,other):\n boul0 = self.linked[0].coordinates[0]==other.linked[0].coordinates[0] and self.linked[0].coordinates[1]==other.linked[0].coordinates[1]\n boul1 = self.linked[1].coordinates[0]==other.linked[1].coordinates[0] and self.linked[1].coordinates[1]==other.linked[1].coordinates[1]\n boulid = self.id==other.id\n return boul0 and boul1 and boulid", "def __eq__(self, other):\n return type(self) == type(other) and self.node is other.node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n\n return (self.nodes[0].id == other.nodes[0].id) & \\\n (self.nodes[1].id == other.nodes[1].id) & \\\n (self.name == other.name)", "def __eq__(self,other):\n try: \n if not self.size == other.size and self.reference == other.reference: return False\n except: return False\n self_pointer, other_pointer = self.reference, other.reference\n for i in range(self.size):\n self_pointer, other_pointer = self_pointer.next, other_pointer.next\n if self_pointer != other_pointer: return False\n return True", "def is_intersecting(l1: ListNode, l2: ListNode) -> bool:\n while l1.next: # get last node of l1\n l1 = l1.next\n while l2.next: # get last node of l2\n l2 = l2.next\n return l1 == l2", "def __eq__(self, other):\r\n\t\treturn self._to_pylist() == other._to_pylist()", "def __eq__(self, other):\n if isinstance(other, DirectedGraphEdge):\n return self.head_vertex == other.head_vertex and self.tail_vertex == other.tail_vertex\n return NotImplemented", "def __eq__(self, other):\n return self.element() == other.element()", "def __eq__(self, other):\n # check equality of the nodesets\n return self.nodeset.__eq__(other.get_nodeset())", "def __eq__(self, other_node):\n return self.state == other_node.state", "def __eq__(self, other):\n if type(other) == type(self):\n if other.triple.relation == self.triple.relation:\n if (other.triple.concept1 == self.triple.concept1 \n and other.triple.concept2 == self.triple.concept2):\n return True\n return False", "def equals_list_nodes(self, self_list_nodes, other_list_nodes):\n\n #If the two list of nodes have different lengths, they are automatically different\n if 
len(self_list_nodes) != len(other_list_nodes):\n return False\n else:\n\n #We sort both list of nodes\n self_list_nodes = sorted(self_list_nodes, key=lambda x : x.num)\n other_list_nodes = sorted(other_list_nodes, key=lambda x : x.num)\n\n #For each node in both lists\n for i, node in enumerate(self_list_nodes):\n\n #We check if the length of the list of the nodes they are connected to are the same\n if len(node.connected_to) != len(other_list_nodes[i].connected_to):\n return False\n else:\n #We check if the connection between the nodes in both list is the same\n node_connected_to = sorted(node.connected_to, key=lambda x : x.num)\n other_node_connected_to = sorted(self_list_nodes[i].connected_to, key=lambda x : x.num)\n for j, node_connec in enumerate(node_connected_to):\n if node_connec.num != other_node_connected_to[j].num:\n return False\n return True", "def __eq__(self, other):\n if isinstance(other, GraphEdge):\n return self.head_vertex == other.head_vertex and self.tail_vertex == other.tail_vertex\n return NotImplemented", "def testEquality(self):\n list1 = [1,2]\n list2 = [1,2]\n \n # not the same object\n self.assertIsNot(list1,list2)\n self.assertNotEqual(id(list1),id(list2))\n self.assertFalse(list1 is list2)\n \n # but content is equal\n self.assertEqual(list1,list2)\n self.assertTrue(list1 == list2)\n \n ### \n list1 = []\n list2 = []\n self.assertFalse(list1 is list2) \n self.assertEqual(list1,list2)", "def __eq__(self, other):\n if isinstance(other, self.__class__):\n return (self.index == other.index) and \\\n (self.inUse == other.inUse) and \\\n (self.type == other.type) and \\\n (self.previousBlock == other.previousBlock) and \\\n (self.amount == other.amount) and \\\n (self.nextBlock == other.nextBlock) and \\\n (self.items == other.items)\n else:\n return False", "def __eq__(self, other):\n if isinstance(other, UnDirectedWeightedGraphEdge):\n if self.head_vertex != other.head_vertex:\n return False\n elif self.tail_vertex != other.tail_vertex:\n return False\n elif self.weight != other.weight:\n return False\n return True\n return NotImplemented", "def __eq__(self, other):\n if isinstance(other, DirectedWeightedGraphEdge):\n if self.head_vertex != other.head_vertex:\n return False\n elif self.tail_vertex != other.tail_vertex:\n return False\n elif self.weight != other.weight:\n return False\n return True\n return NotImplemented", "def __eq__(self, other):\n if not isinstance(other, Node):\n return False\n return self.data == other.data", "def __eq__(self, other):\n if not isinstance(other, CashFlowList):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n # check equality of names since names are unique identifiers of nodes\n return self.name.__eq__(other.get_name())", "def __eq__(self, other):\n # check equality of names since names are unique identifiers of nodes\n return self.name.__eq__(other.get_name())", "def __eq__(self, other: 'ListPools') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == other.__dict__", "def __eq__(self, other: 'PriorityNode') -> bool:\n return self.priority == other.priority and self.value == other.value", "def __eq__(self, other):\n return other and self.item == other.item", "def __eq__(self, other) -> bool:\n if not isinstance(other, self.__class__):\n return False\n\n if self.number_of_nodes() != other.number_of_nodes():\n return False\n if self.number_of_edges() != other.number_of_edges():\n return False\n\n if list(self.nodes) != list(other.nodes):\n return 
False\n\n # Compare node data.\n for i in self.nodes:\n # We may want to exclude the 'name' attribute from comparisons, assuming\n # it has no logical meaning.\n if self.nodes[i] != other.nodes[i]:\n return False\n\n if list(self.edges) != list(other.edges):\n return False\n\n for i, j in self.edges:\n # Compare edge data.\n if self.edges[i, j] != other.edges[i, j]:\n return False\n\n return True", "def __eq__(self, other: SymbolicObject) -> bool:\n\n if isinstance(other, ListObject):\n return self._subobjects == other._subobjects\n else:\n return False", "def __eq__(self, other):\n if not isinstance(other, ListGlobalValue):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, another_node):\n return Node.state_as_string(self.state) == Node.state_as_string(another_node.state)", "def __eq__(self, other):\n if self.capacity != other.capacity:\n return False\n for i in range(self.capacity):\n if self.data[i] != other.data[i]:\n return False\n return self.head == other.head and self.tail == other.tail and self.size == other.size", "def __eq__(self, other):\n if not isinstance(other, Node):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n pass", "def __eq__(self, other):\n pass", "def __eq__(self, other: 'LTL'):\n if self.formula == other.formula:\n return True\n implied_a = self >= other\n implied_b = self <= other\n return implied_a and implied_b", "def __eq__(self, other):\n return other.left == self.left and other.right == self.right", "def __eq__(self: 'TOAHModel', other: 'TOAHModel') -> bool:\n return self.stool_lst == other.stool_lst", "def __eq__(self, other):\n return self.left == other.left and self.right == other.right and self.left2 == other.left2 and self.right2 == other.right2", "def __le__(self, other):\n return self.head_vertex <= other.head_vertex and self.tail_vertex <= other.tail_vertex", "def __eq__(\n self: \"HereditaryStratumOrderedStoreList\",\n other: \"HereditaryStratumOrderedStoreList\",\n ) -> bool:\n # adapted from https://stackoverflow.com/a/4522896\n return (\n isinstance(\n other,\n self.__class__,\n )\n and self.__slots__ == other.__slots__\n and all(\n getter(self) == getter(other)\n for getter in [\n operator.attrgetter(attr) for attr in self.__slots__\n ]\n )\n )", "def __eq__(self, node):\n return (self.entry == node.entry)", "def __eq__(self, other):\n if not len(self) == len(other):\n return False\n for key in self.ordered_list:\n if key not in other.ordered_list:\n return False\n if self[key] != other[key]:\n return False\n return True", "def __eq__(self, other):\n return self.item == other", "def __eq__(self, other):\n return (self.start == other.start and self.end == other.end)", "def __eq__(self, other):\n return not self.__ne__(other)", "def equals(self, other): # -> bool:\n ...", "def __eq__(self, other):\r\n return self.ID == other.ID and self.index == other.index", "def __eq__(self, other):\n return ((self.item_A == other.item_A) and (self.item_B == other.item_B)) or (\n (self.item_A == other.item_B) and (self.item_B == other.item_A)\n )", "def __eq__(self, other):\n return self.start == other.start and self.end == other.end", "def __eq__(self, other):\n if not isinstance(other, StateSyncNode):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\r\n\t\tif self.eqHash == other.eqHash:\r\n\t\t\treturn True\r\n\t\telse:\r\n\t\t\treturn False", "def __eq__(self, other):\n return (\n self.begin == other.begin and\n self.end == other.end and\n self.data == 
other.data\n )", "def test_equals(self):\n parameters = [\n (1, 'a', False),\n (1, None, False),\n (1, 2, False),\n (1, 1, True)\n ]\n for pair in parameters:\n with self.subTest(pair=pair):\n self.getLogger().info('Next pair %s', pair)\n _obj1 = Node(pair[0])\n _obj2 = None if not pair[1] else Node(pair[1])\n self.assertEqual(_obj1._equals(_obj2), pair[2])\n _objSelf = Node(1)\n self.assertTrue(_objSelf._equals(_objSelf))", "def __eq__(self, other):\n self = filter_tree(self, _remove_visit_meta)\n return super(Node, self).__eq__(filter_tree(other, _remove_visit_meta))", "def __eq__(self, other):\n return self.value == other or self.value == other.value", "def __eq__(self, other: 'UserList') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\r\n if other is not None:\r\n return self.value() == other.value()\r\n else:\r\n return False", "def __eq__(self, other):\n if not isinstance(other, NodeProperties):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, *args):\n return _ida_frame.xreflist_t___eq__(self, *args)", "def __eq__(self, other):\r\n\r\n return type(self) == type(other) and self.ttype == other.ttype", "def __eq__(self, other):\r\n\t\treturn (self.type == other.type and self.value == other.value)", "def __le__(self, other):\n if self.head_vertex > other.head_vertex:\n return False\n elif self.tail_vertex > other.tail_vertex:\n return False\n elif self.weight > other.weight:\n return False\n return True", "def __le__(self, other):\n if self.head_vertex > other.head_vertex:\n return False\n elif self.tail_vertex > other.tail_vertex:\n return False\n elif self.weight > other.weight:\n return False\n return True", "def __eq__(self,other):\n\t\tif other != None:\n\t\t\treturn self.id==other.id and \\\n\t\t\t\t self.length == other.length and \\\n\t\t\t\t self.value==other.value\n\t\telse:\n\t\t\treturn False", "def __eq__(self, other):\r\n\t\tif(not(self.checkCmp(other))):\r\n\t\t\treturn False\r\n\r\n\t\tcmpflag = True\r\n\t\tfor li1, li2 in zip(self.vector, other):\r\n\t\t\tif(li1 != li2):\r\n\t\t\t\tcmpflag = False\r\n\t\treturn cmpflag", "def nodes_are_equal(node1, node2):\n\n try:\n return dump_ast(node1).strip() == dump_ast(node2).strip() and \\\n node1.lineno == node2.lineno and \\\n node1.col_offset == node2.col_offset\n except:\n return False", "def __eq__(self, other):\n if not isinstance(other, NodeStatus):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self,*args):\r\n pass", "def __eq__(self, other) -> bool:\r\n\t\treturn self.NextState is other", "def __eq__(self, other):\n return self.value == other.value", "def __eq__(self,other):\n return self is other", "def __eq__(self, other):\n if not isinstance(other, Node):\n return NotImplemented\n return self.state == 
other.state", "def __eq__(self, other):\n raise NotImplementedError" ]
[ "0.82909065", "0.77274597", "0.76242423", "0.75131154", "0.7275534", "0.7268777", "0.714763", "0.714763", "0.714763", "0.714763", "0.714763", "0.71005964", "0.7045595", "0.70243096", "0.6943484", "0.69265825", "0.6903196", "0.6843115", "0.68402064", "0.6791717", "0.676546", "0.674943", "0.67386645", "0.67213774", "0.6711115", "0.6703123", "0.669996", "0.6691926", "0.668502", "0.6674029", "0.6674029", "0.66663903", "0.66485935", "0.6647681", "0.6599728", "0.65781385", "0.65425396", "0.653407", "0.65296364", "0.6521819", "0.65186244", "0.65186244", "0.6489688", "0.64759886", "0.64709306", "0.6469639", "0.64673424", "0.64664537", "0.6461701", "0.6461535", "0.6460114", "0.6455086", "0.6453482", "0.6452752", "0.6441446", "0.6434724", "0.6432325", "0.6431494", "0.6415773", "0.641431", "0.6412814", "0.6406672", "0.63995844", "0.6392681", "0.6391331", "0.6387979", "0.63850963", "0.63827056", "0.6382565", "0.6378844", "0.6378844", "0.63770026", "0.6372842", "0.63653857", "0.6361619", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.6360589", "0.63592917", "0.6358952", "0.63579273", "0.63568974", "0.6333755" ]
0.7345923
4
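Treating the document fields of the last few records as methods of one LinkedList class (an assumption — the records may come from different source repositories), together with the hypothetical Node and push_back sketches above, a short usage sketch of the equality method is:

a = LinkedList([1, 2, 3])
b = LinkedList([1, 2, 3])
c = LinkedList([1, 2])
assert a == b  # equal sizes and matching node values
assert a != c  # sizes differ, so __eq__ returns False immediately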
DO NOT EDIT String representation of a linked list
def __repr__(self):
    temp_node = self.head
    values = []
    if temp_node is None:
        return str([])
    while temp_node is not None:
        values.append(temp_node.value)
        temp_node = temp_node.next_node
    return str(values)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n temp = self.__head\n ss = []\n while temp is not None:\n ss.append(str(temp.data))\n temp = temp.next_node\n return ('\\n'.join(ss))", "def to_string(self):\n try:\n items = \" \"\n current = self.head\n while current:\n items += f\"{ {current.value} }->\"\n current=current.next\n items+=\"NULL\"\n print (items)\n return items\n # items.append(current.value)\n # current = current.next\n # print(''.join(f\"{ {k[1]} }->\" for k in enumerate(items))+'NULL')\n # return(''.join(f\"{ {k[1]} }->\" for k in enumerate(items))+'NULL')\n except Exception as error:\n print (f\"There is error in __init__ of LinkedList, the error {error}\")", "def __str__(self) -> str:\n content = ''\n if self.head is not None:\n content = str(self.head)\n cur = self.head.next\n while cur is not None:\n content += ' -> ' + str(cur)\n cur = cur.next\n return 'SLL [' + content + ']'", "def __str__(self) -> str:\n content = ''\n if self.head is not None:\n content = str(self.head)\n cur = self.head.next\n while cur is not None:\n content += ' -> ' + str(cur)\n cur = cur.next\n return 'SLL [' + content + ']'", "def __str__(self):\n string = \"\"\n cur_node = self.head\n while cur_node is not None:\n string += cur_node.data.__str__()\n cur_node = cur_node.next\n return string", "def __str__(self):\n s = \"\"\n current = self.__head\n while current:\n s += str(current.data) + \"\\n\"\n current = current.next_node\n return s[:-1]", "def __str__(self):\n\n list_str = ''\n current = self.head\n while current:\n # print(current, \"current\")\n list_str += str(current.value ) + ', '\n current = current.next\n return list_str[:-2]", "def __str__(self):\n\n return LinkedList.str_recursive(self.front)", "def __str__(self):\n\n list_str = ''\n current = self.head\n while current:\n # print(current, \"current\")\n list_str += str(current.value ) + ', '\n current = current.next\n \n return list_str[:-2]", "def __str__(self) -> str:\n ret = StringBuilder(\"\")\n current = self.head\n while current:\n ret += current.info\n current = current.next\n return str(ret)", "def __str__(self):\n string = \"\"\n cur = self.__head\n while cur is not None:\n string += str(cur.data)\n cur = cur.next_node\n if cur is not None:\n string += \"\\n\"\n return string", "def linked_list_string(self, linked_list):\n\n if linked_list is not None:\n root_str = str(linked_list.val) + \" => \"\n next_str = self.linked_list_string(linked_list.next)\n\n return root_str + next_str\n return 'None'", "def __str__(self):\n result = [] \n node = self.head\n while node is not None:\n result.append(str(node.value))\n node = node.next_node \n return '[' + ', '.join(result) + ']'", "def __str__(self):\n result = [] \n node = self.head\n while node is not None:\n result.append(str(node.value))\n node = node.next_node \n return '[' + ', '.join(result) + ']'", "def __str__(self):\n out = '['\n if self.head != None:\n cur = self.head\n out = out + str(self.head)\n cur = cur.next\n while cur != None:\n out = out + ' -> ' + str(cur)\n cur = cur.next\n out = out + ']'\n return out", "def __str__(self):\n reprStr = ''\n currNode = self.head\n while currNode:\n reprStr = reprStr + str(currNode.count) + ' ' + str(currNode.data) + '\\n'\n currNode = currNode.next\n return reprStr", "def __str__(self):\n\n result = \"\"\n\n temp = self.head\n while temp is not None:\n result += str(temp.data) + \" -> \"\n temp = temp.next\n\n return result[0:-4]", "def __str__(self):\n temp = \"head\"\n temp_node = self.head\n while temp_node is not None:\n temp += f' -> 
{temp_node.val}'\n temp_node = temp_node.next\n temp += f'-> None'\n return temp", "def __str__(self):\n values = \"\"\n node = self.head\n while node:\n values = values + \"{} \".format(node.__str__())\n node = node.next\n return values", "def __repr__(self):\r\n return \"ListNode({})\".format(self.data)", "def __str__(self):\n next = self.next.data if self.next else \"None\"\n return \"%s->%s\" % (self.data, next)", "def __str__(self):\n elements = []\n current = self._head\n while current:\n elements.append(str(current.val))\n current = current.next\n return ' -> '.join(elements)", "def as_string(self):\n\n ll_elements = []\n current = self\n while current:\n ll_elements.append(str(current.data))\n current = current.next\n\n return \"->\".join(ll_elements)", "def __str__(self):\n\n return self._fold_loop(lambda x, y: y + \"%s -> \" % x, \"LinkedList [\",\\\n self._head) + \"None]\"", "def test_linked_list_repr_format(empty_list):\n expected = '<Linked List | Head: None | Length: 0>'\n actual = repr(empty_list)\n assert expected == actual", "def __repr__(self):\n return \"LinkedList([{}],{}/{})\".format(self.cur_node, self.cur_pos, self.length)", "def print_list(self):\n node = self.head\n\n string = '['\n while node:\n if node.next:\n string += str(node.value) + ' -> '\n else:\n string += str(node.value)\n node = node.next\n string += ']'\n return string", "def __str__(self):\n cur_node = self.head\n str_list = ['{']\n while cur_node is not None:\n str_list.append(str(cur_node))\n if cur_node is not self.tail:\n str_list.append(', ')\n cur_node = cur_node.next_node\n str_list.append('}')\n return ''.join(str_list)", "def __str__(self):\n outstr = \"-\"\n node = self.head.next\n while node != self.tail:\n outstr = outstr + str(node.element) + \"-\"\n node = node.next\n return outstr", "def __repr__(self):\n return 'LinkedList({!r})'.format(self.items())", "def test_linked_list_str_format(empty_list):\n expected = 'Head: None | Length: 0'\n actual = str(empty_list)\n assert expected == actual", "def test_linked_list_display(new_ll):\n from linked_list import Linked_List\n result = str((\"apple\", 3, \"pear\", 1, \"something\"))\n assert new_ll.display() == result", "def __repr__(self):\n\n nodes = []\n current = self.head\n\n while current:\n if current is self.head:\n nodes.append('[Head: %s]' % current.data)\n elif current.next_node is None:\n nodes.append('[Tail: %s]' % current.data)\n else:\n nodes.append('[%s]' % current.data)\n current = current.next_node\n\n return '-> '.join(nodes)", "def __repr__(self):\n nodes = []\n current = self.head\n while current:\n nodes.append(repr(current))\n current = current.next\n\n return '[' + ','.join(nodes) + ']'", "def test_list_representation(self):\n \n lr = ['- L1\\n- L2\\n- L3',\n 'text\\n- L1\\n- L2\\ntext\\n- L3',\n '* H\\n- L1\\n - L2\\n** H\\n- L3',\n ' - L1\\n - L2\\n - L3',\n '- L1\\n - L2\\n - L3'\n ]\n\n for l in lr:\n self.assertEqual(l, str(parser.parse(l)))", "def __repr__(self):\n nodes = []\n curr = self.head\n while curr:\n nodes.append(repr(curr))\n curr = curr.next\n return '[' + ', '.join(nodes) + ']'", "def __repr__(self):\n nodes = []\n curr = self.head\n while curr:\n nodes.append(repr(curr))\n curr = curr.next\n return '[' + ', '.join(nodes) + ']'", "def __str__(self):\n _str = \"\"\n current_node = self._head\n while(current_node != None):\n _str += str(current_node.value)\n _str += \" -> \"\n current_node = current_node.next\n _str += \"None\"\n return _str", "def __str__(self):\n\n string = \"\"\n\n current = 
self.head\n\n while current is not None:\n string += f\"{ {current.value} } -> \"\n current = current.next\n\n string += f\" None \"\n\n return string", "def state(self):\n new_list = []\n item = self.head\n while item:\n new_list.append(item.data)\n item = item.next\n return str(new_list).replace(' ', '')", "def __str__(self):\n\t\tstrBuffer = \"[\"\n\t\ttemp = self.head\n\t\tcount = 0\n\n\t\twhile temp.getNext() != None:\n\t\t\t#print temp.getData()\n\t\t\tstrBuffer += temp.getData() + \", \"\n\t\t\tcount += 1\n\t\t\tif count % 20 == 0:\n\t\t\t\tstrBuffer = strBuffer[:-1] + '\\n' \n\t\t\ttemp = temp.getNext()\n\t\t\n\t\tstrBuffer += (temp.getData() + ']') #off by one fix\n\t\t\t\n\t\treturn strBuffer", "def str_recursive(node):\n\n if node == None:\n return \"\"\n else:\n return str(node.item) + \" \" + LinkedList.str_recursive(node.next)", "def __str__(self):\n\n if self.head != None:\n return (str(self.id)+\" (\"+str(self.tail.id)+\", \"+\n str(self.head.id)+\")\")\n else:\n return str(self.id)+\" (\"+str(self.tail.id)+\", -1)\"", "def list_2_string(l, name='List'):\n buff = io.StringIO()\n print_list(l, name=name, output=buff)\n return buff.getvalue()", "def _create_list_item(self, str):\n para = nodes.paragraph()\n para += nodes.strong('', str)\n\n item = nodes.list_item()\n item += para\n\n return item", "def display(self):\n current = self.head\n result = \"(\"\n while current is not None:\n if isinstance(current.data, str):\n result = result + \"'\" + current.data + \"'\"\n else:\n result = result + str(current.data)\n if current.next_node is not None:\n result += \", \"\n current = current.next_node\n result += ')'\n print(result)\n return result", "def display(self):\n node = self.head\n display_this = []\n while node:\n display_this.append(node.data)\n node = node.next\n return str(display_this).replace(\"[\", \"(\").replace(\"]\", \")\")", "def test_graph_str():\n node_list = []\n node_list.append(Node({'A':['B','C']}))\n node_list.append(Node({'B':['C','D']}))\n node_list.append(Node({'C':['D']}))\n g = Graph(node_list)\n assert str(g) == \"[{'A':['B','C']},{'B':['C','D']},{'C':['D']}]\"", "def get_string(self):\n current = self\n l = []\n while current:\n l.append(current.value)\n current = current.next\n return \"\".join(l)", "def lstString() :\n return s.lstString()", "def lstToLinkedList(lst):\n if not lst: return\n LinkedList = Node(lst[0])\n LinkedList.next = lstToLinkedList(lst[1:])\n return LinkedList", "def __repr__(self):\n return \"{}\".format(self._head)", "def rev_list_rep(value):\n # turn it into a string\n reversed_notation = str(value)[::-1]\n list_representation = None\n last_element = None\n for c in reversed_notation:\n if list_representation is None:\n list_representation = ListNode(int(c))\n last_element = list_representation\n else:\n last_element.next = ListNode(int(c))\n last_element = last_element.next\n\n return list_representation", "def __str__(self):\n return \"->\".join([str(n.data) for n in self.as_list()])", "def __repr__(self):\n\n return \"LinkedList created\"", "def __str__(self):\n if self.next is not None:\n return (str(self.value) + ' -->')\n return str(self.value)", "def str_reverse_recur(node):\n\n if node == None:\n return \"\"\n else:\n return LinkedList.str_reverse_recur(node.next) + \" \" + str(node.item)", "def showListFromNode(self, node):\n if self.empty():\n return \"Linked List is Empty\"\n\n l = node\n while l is not None:\n print(l.data, end=\" ----> \")\n l = l.next\n print()\n return", "def __str__(self):\n return 
str(self.list)", "def create_linked_list(input_list):\n head=None\n for value in input_list:\n if head is None:\n head=Node(value)\n else:\n current_node=head\n while current_node.next:\n current_node=current_node.next\n current_node.next=Node(value)\n# printlist(head)\n# print('------')\n return head", "def __repr__(self):\n if self.rest is Link.empty:\n rest = ''\n else:\n rest = ', ' + repr(self.rest)\n return 'Link({0}{1})'.format(self.first, rest)", "def lstToLinkedList(lst):\n if not lst: return\n LinkedList = Node(lst[0])\n LinkedList.next = lstToLinkedList(lst[1:])\n return LinkedList", "def __repr__(self):\n ## return str(self.first) + \" -> \" + repr(self.rest)\n if self.rest is Link.empty:\n rest_str = \"\"\n else:\n rest_str = \", \" + repr(self.rest)\n return \"Link({0}{1})\".format(self.first, rest_str)", "def __repr__(self):\n if self.rest:\n rest_repr = ', ' + repr(self.rest)\n else:\n rest_repr = ''\n return 'Link({0}{1})'.format(self.first, rest_repr)", "def testStringRepresentationOnInstantiation(self):\r\n self.assertEqual(str(self.lv), 'list')", "def display(self, end=\"->\"):\n curr = self.head\n result = ''\n while curr != None:\n result += str(curr.data) + end\n curr = curr.link\n result = result.strip(end)\n print(result)", "def test_lined_list_create_with_non_iterable():\n from linked_list import Linked_List\n new_linked_list = Linked_List(-100)\n assert new_linked_list.head.value == -100", "def test_node_to_str(self):\n f = lws.node_to_str\n # normal\n assert f(('a', 'b')) == 'a: b'\n # exception\n assert f(('a',),) == \"('a',): \"\n assert f('a') == 'a: '", "def __repr__(self):\n return '<List %r>' % (self.name)", "def __str__(self):\n result = \"\"\n current = self.__head\n while current is not None:\n result = result + \" \" + str(current.getPayload())\n current = current.getNext()\n\n return result", "def printList(head):\n print(deconstructList(head))", "def html_list_item(string):\n if string == \"\":\n return \"\"\n\n return \"<li>\" + string + \"\\n\"", "def __init__(self, head: ListNode):\n self.head = head\n self.list = []\n while head:\n self.list.append(head.val)\n head = head.next", "def __str__(self):\n return \"({},{},{})\".format(self.tail, self.head, self.weight)", "def list_to_str( L ):\n if len(L) == 0: return ''\n return L[0] + list_to_str( L[1:] )", "def list_to_str( L ):\n if len(L) == 0: return ''\n return L[0] + list_to_str( L[1:] )", "def list_print(self):\n node = self.cur_node # cant point to ll!\n while node:\n print(node.data)\n node = node.next", "def test_ordered_lists(self):\n\n list_str = '1. One'\n \n doc = parser.parse(list_str)\n self.assertEqual(len(doc.children()), 1)\n\n ol = doc.children()[0]\n self.assertTrue(isinstance(ol, parser.ListNode))\n\n self.assertEqual(str(doc), list_str)\n\n list_str = '- One\\n 1. OneOne\\n 2. 
OneTwo'\n\n doc = parser.parse(list_str)\n self.assertEqual(len(doc.children()), 1)\n\n ul = doc.children()[0]\n self.assertEqual(len(ul.children), 1)\n\n li = ul.children[0]\n ol = li.children[0]\n\n self.assertEqual(len(ol.children), 2)", "def __addList(self, node, name):\n\n if name == \"OrderedList\":\n listId = self.letter.addList(List.ARABIC)\n else:\n listId = List.BULLETED\n self.pieces.append(f\"{{\\\\li580{{\\\\ls{listId:d} \")\n for item in node.findall(\"ListItem\"):\n self.pieces.append(self.__getText(item).strip())\n self.pieces.append(\"\\\\par\\n\")\n self.pieces.append(\"}}\\n\\\\par\\n\")", "def __init__(self, head: ListNode):\n self.l = []\n while head:\n self.l.append(head.val)\n head = head.next", "def listToString(L):\r\n S = ''\r\n for x in L:\r\n S += str(x)\r\n return S", "def __repr__(self):\n if self.size == 0:\n return \"DLL=[]\"\n\n str_repr = \"DLL=[\"\n current_node = self.head\n\n while current_node is not None:\n str_repr += f\"{current_node} -> \"\n current_node = current_node.next\n\n str_repr = f\"{str_repr[:-4]}]\"\n return str_repr", "def print_nodes(self, s):\n\t\treturn ' '.join((s.replace(\"\\n\",\" \")).split())", "def __repr__(self):\n string = ''\n seen = set()\n curr = self.head\n while curr:\n if curr in seen:\n string += 'loop: ' + str(curr)\n break\n else:\n string += str(curr)\n seen.add(curr)\n return string", "def test_node_str():\n node_a = Node({'name':['list','of','vertex']})\n assert str(node_a) == \"{'name':['list','of','vertex']}\"", "def listToString(s):\n # initialize an empty string\n str1 = \"\"\n\n # traverse in the string\n for ele in s:\n try:\n str1 = str1 + \" \" + ele\n except:\n pass\n\n # return string\n return str1", "def printList(self): \r\n aux = self.head \r\n while(aux): \r\n print(aux.data , end = ' ') \r\n aux = aux.next", "def __str__(self):\n\n final_string = \"\"\n\n current = self.front\n\n while current:\n final_string += f\"{{{current.value}}} -> \"\n current = current.next\n\n return f\"{final_string}NULL\"", "def display(self):\n res = \"(\"\n curr = self.head\n while curr:\n val = curr.val\n if type(val) is str:\n val = \"'\" + val + \"'\"\n else:\n val = str(val)\n res += val\n if curr.next:\n res += ', '\n curr = curr.next\n return res + ')'", "def __str__(self):\n stringRepresentation = []\n for node in self.getNodes():\n stringRepresentation.append(\"->\".join(\n (str(node), str(self.graph[node]))))\n\n return str(stringRepresentation)", "def str_first_at_end(self):\n outstr = \"-\"\n node = self.head.next\n while node != self.tail:\n outstr = \"-\" + str(node.element) + outstr\n node = node.next\n return outstr", "def as_string(self):\n\n\t\tout = []\n\t\tn = self\n\n\t\twhile n:\n\t\t\tout.append(str(n.data))\n\t\t\tn = n.next\n\n\t\treturn \"\".join(out)", "def rest(s):\n assert is_link(s), 'rest only applies to a linked list.'\n assert s != empty, 'empty linked list has no rest.'\n return s[1]", "def __str__(self):\n return list.__str__(self)", "def __str__(self) -> str:\n\t\treturn get_node_description(self.name)", "def __repr__ (self):\n\t\ts=[];add=s.append\n\t\tfor item in self.data:\n\t\t\tadd (str(item))\n\t\treturn join (s, '\\n')", "def __str__(self):\n s = \"--\\n\"\n for node in self:\n s += node.__str__() + \"\\n\"\n return s + \"--\"", "def simple_ll():\n ll = LinkedList()\n ll.push(20)\n ll.push(4)\n ll.push(15)\n ll.push(85)\n return ll", "def rest(s):\n assert is_link(s), 'rest only applies to linked list.'\n assert s != empty, 'empty linked list has no rest.'\n return s[1]", "def 
__str__(self):\n c = self\n ans = \"[\"\n while c:\n ans += \".\"\n c = c.next\n return ans + \"]\"" ]
[ "0.72847897", "0.7202857", "0.71419424", "0.71419424", "0.6963336", "0.69505835", "0.6936421", "0.6885288", "0.6866368", "0.6856697", "0.68190056", "0.67749923", "0.6762058", "0.6762058", "0.67520875", "0.6743596", "0.66924685", "0.6669105", "0.6654589", "0.6606938", "0.6560022", "0.6511865", "0.6468755", "0.6464713", "0.6442689", "0.64139277", "0.6411162", "0.6406209", "0.6403576", "0.6359416", "0.6328016", "0.632696", "0.6287373", "0.6280289", "0.62791705", "0.6278681", "0.6278681", "0.6203534", "0.6195911", "0.618485", "0.6178287", "0.61336875", "0.6095179", "0.60730827", "0.6035994", "0.60249585", "0.6016442", "0.6011246", "0.5971213", "0.594093", "0.5928494", "0.5919194", "0.5918372", "0.59159005", "0.59054947", "0.58610487", "0.5855273", "0.58518696", "0.58440155", "0.58285904", "0.57940334", "0.5779338", "0.57628745", "0.5759192", "0.5742771", "0.57394975", "0.5737833", "0.5736333", "0.5725335", "0.570684", "0.5703999", "0.56964535", "0.5689661", "0.5681386", "0.56771225", "0.56771225", "0.56654876", "0.56551194", "0.5641945", "0.5627942", "0.5622695", "0.5618665", "0.56165427", "0.5609534", "0.5603026", "0.5598816", "0.5584181", "0.55840415", "0.55657583", "0.5564214", "0.5555131", "0.55427474", "0.5534985", "0.5533765", "0.55311376", "0.5526101", "0.5522812", "0.5501113", "0.5497024", "0.5475951" ]
0.6343675
30
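All of this record's candidates render a singly linked list as text. For illustration, a minimal self-contained sketch in the same vein; the Node and LinkedList names, the data and next_node attributes, and the arrow separator are assumptions drawn loosely from the candidates, not from any single one:

class Node:
    def __init__(self, data, next_node=None):
        self.data = data
        self.next_node = next_node

class LinkedList:
    def __init__(self):
        self.head = None

    def __str__(self):
        # Walk the chain, collecting each node's data, then join with arrows.
        parts = []
        node = self.head
        while node is not None:
            parts.append(str(node.data))
            node = node.next_node
        return ' -> '.join(parts)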
Gets the number of nodes of the linked list
def length(self):
    return self.size
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_length(self):\n pointer = self.head\n counter = 0\n while pointer:\n counter += 1\n pointer = pointer.next_node\n return counter", "def size(self):\n\n count = 0\n curr_node = self.head\n while curr_node is not None:\n curr_node = curr_node.next_node\n count += 1\n\n return count", "def size(self):\n count = 0\n current = self.head\n if self.head is None:\n return 0\n while current.next_node is not None:\n count += 1\n current = current.next_node\n return count + 1", "def size(self):\n current = self.__head\n count = 0\n while current:\n count += 1\n current = current.next_node\n return count", "def count(self):\n node = self.head\n i = 0\n while node:\n i += 1\n node = node.next\n\n return i", "def length(self):\r\n current_node = self.head\r\n size = 0\r\n while current_node.next!=None:\r\n size += 1\r\n current_node = current_node.next\r\n return size", "def size(self):\n size = 0 \n node = self.head \n while node is not None:\n size += 1 \n node = node.next_node \n return size", "def size(self):\n size = 0 \n node = self.head \n while node is not None:\n size += 1 \n node = node.next_node \n return size", "def get_length(self):\n current_node = self.head\n if current_node:\n i = 1\n while current_node.next:\n current_node = current_node.next\n i += 1\n return i\n else:\n return 0", "def size(self):\n size = 0\n node = self.head\n while node:\n size += 1\n node = node.next\n return size", "def size(self):\n\n count = 0\n\n temp = self.head\n while temp is not None:\n count += 1\n temp = temp.next\n\n return count", "def len(self):\n count = 0\n temp = self.head\n while temp.next!=None:\n count += 1\n temp = temp.next\n return(count)", "def size(self):\n current = self.head\n counter = 0\n while current is not None:\n counter += 1\n current = current.next\n return counter", "def size(self):\n if self.empty():\n count = 0\n else:\n n = self.head\n count = 1\n while n.next is not None:\n count += 1\n n = n.next\n return count", "def len(self):\n start = self.head\n count = 0\n while start:\n count+=1\n start = start.getLink()\n return count", "def get_count(self):\n count = 0\n temp = self.head\n while temp:\n count += 1\n temp = temp.next\n return count", "def size(self):\n traverse = self.head\n count = 0\n while traverse.next != None:\n traverse = traverse.next\n count += 1\n return count + 1", "def size(self):\n traverse = self.head\n count = 1\n while traverse.next != None:\n traverse = traverse.next\n count += 1\n return count", "def get_length(self):\n curr = self.head\n length = 0\n\n while curr != None:\n length += 1\n curr = curr.link\n\n return length", "def length(self):\n if self.head:\n count = 1\n current = self.head\n while(current.next != self.head):\n\tcount+=1\n\tcurrent = current.next\n return count\n else:\n return 0", "def count(self):\n\t\treturn len(list(self.nodes))", "def size(self):\n \"\"\"\n :type None\n :rtype int\n \"\"\"\n curr = self.head\n count = 0\n while curr:\n count += 1\n curr = curr.getNext()\n \n return count", "def size(self):\n traverse = self.head\n\n if self.head == None:\n return 0\n size = 1\n while traverse.next != None:\n traverse = traverse.next\n size += 1\n return size", "def node_count(self):\n return self._node_count", "def get_num_nodes(self):\n return len(self._nodes)", "def get_num_nodes(self):\n return len(self._nodes)", "def length(self, head):\n count = 0\n while head:\n count += 1\n head = head.next\n return count", "def length(self): # Class O(n)\r\n h = self.head\r\n size = 1\r\n while 'next' in dir(h.next):\r\n 
size += 1\r\n h = h.next\r\n return size", "def numNodes(self):\n res = 0\n for n in self.iternodes():\n res += 1\n return res", "def NodesCount(self):\n return len(self.nodes)", "def number_of_nodes(self) -> int:\n return self.graph.number_of_nodes()", "def __len__(self):\n if not self.head:\n return 0\n if not self.head.next:\n return 1\n N, tort, hare = 1, self.head.next, self.head.next.next\n while tort and (tort is not hare):\n N += 1\n tort = tort.next\n if hare and hare.next:\n hare = hare.next.next\n return N", "def count_nodes(self):\n\t\treturn self.__count_nodes(self)", "def number_of_nodes(self):\n return int(self._data['number_of_nodes'])", "def num_nodes(self):\n return len(self.nodes)", "def node_count(self) -> int:\n return pulumi.get(self, \"node_count\")", "def Nnodes(self):\n return len(self.nodes)", "def count(self):\n return self.__tree.node_count", "def node_count(self):\n return self._root.count()", "def get_length(head):\n c = 0\n while head:\n c += 1\n head = head.next\n return c", "def node_count(self):\n if self.value:\n cnt = 0\n else:\n left_cnt = self.left.node_count()\n right_cnt = self.right.node_count()\n cnt = 1 + left_cnt + right_cnt\n return cnt", "def get_node_count(self) -> Iterable:\n return len([i for i in self.all_nodes_as_iterable()])", "def length(self):\n return self.linked_list.length()", "def num_nodes(self):\n return len(self._node_reg)", "def size(self):\n\t\treturn len(self.nodes)", "def node_count(self) -> int:\n return int(self.graph_tuple_stats.node_count or 0)", "def num_nodes(self) -> int:\n return pulumi.get(self, \"num_nodes\")", "def _num_nodes(self):\n return int(self._node_map[-1])", "def size(self):\n return self._linkedlist.size()", "def size(self):\n\n size = 1\n traverse = self.front\n if self.front == None:\n return 0\n\n while traverse.next != None:\n traverse = traverse.next\n size += 1\n return size", "def size(self):\n\n size = 1\n traverse = self.front\n if self.front == None:\n return 0\n\n while traverse.next != None:\n traverse = traverse.next\n size += 1\n return size", "def numNodes(T):\r\n n = 1\r\n if T.isLeaf:\r\n return n\r\n for i in range(len(T.child)):\r\n n += numNodes(T.child[i])\r\n return n", "def node_count(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"node_count\")", "def node_count(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"node_count\")", "def node_count(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"node_count\")", "def node_count(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"node_count\")", "def num_of_node(self):\n \n try:\n return self.num_node\n except:\n print(\"ERROR: No graph exists\")", "def _num_nodes(self):\n return len(self._nid2partid)", "def count_nodes(self):\n if self.children is None:\n return 0\n\n total_count = 0\n for child in self.children:\n if child is None:\n return 0\n child_count = child.count_nodes()\n total_count = total_count + child_count\n\n return total_count+1", "def __len__(self) -> int:\n return len(self.nodes)", "def __len__(self) -> int:\r\n return len(self._nodes)", "def get_list_length(self):\n n = 0\n l = self\n while l.is_block():\n n+=1\n l = l.field(1)\n return n", "def number_of_nodes(self, ntype: str = None) -> int:\n return self.num_nodes(ntype)", "def compute_num_nodes(graph):\n return len(graph.keys()) # return the number of nodes in the graph", "def count_nodes(self):\n if self.is_empty():\n return 0\n elif self.is_leaf():\n return 1\n else:\n if self.get_left():\n if self.get_right():\n return 1 + 
self.get_left().count_nodes() + self.get_right().count_nodes()\n else:\n return 1 + self.get_left().count_nodes()\n else:\n return 1 + self.get_right().count_nodes()", "def num_nodes(self):\n return len(self.successors)", "def lengthnodelist(self, nl_p=None):\n if not nl_p:\n nl_p = self.getnetnodes()\n # (const nodelist_bn* nodes)\n cnetica.LengthNodeList_bn.argtypes = [c_void_p]\n cnetica.LengthNodeList_bn.restype = c_int\n return cnetica.LengthNodeList_bn(nl_p) # nnodes", "def node_count(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"node_count\")", "def lsCountNode(self):\n ret = libxml2mod.xmlLsCountNode(self._o)\n return ret", "def size(self):\r\n if self.head is None:\r\n return None\r\n shape = 0\r\n node = self.head\r\n while node:\r\n shape += 1\r\n node = node.next\r\n return shape", "def __len__(self):\n return self.count_of(CUBA.NODE)", "def get_num_nodes(self):\n\n return sum(self.topology)", "def __len__(self):\n return len(self.nodes)", "def count(self):\r\n return self.count_helper(self.top_node)", "def number_of_data_nodes(self):\n return int(self._data['number_of_data_nodes'])", "def node_count(self):\n return self.process_tree.get_descendant_count() + 1", "def Test_NumNodes(Graph_MD):\n N_Knoten = Graph_MD.number_of_nodes()\n \n return N_Knoten", "def num_nodes(self) -> Optional[int]:\n return pulumi.get(self, \"num_nodes\")", "def size(self):\n try:\n return len(self._adjacency_list)\n except Exception as error:\n print(f'An error occurred: {error}')", "def __len__(self):\n return len(self.node)", "def get_node_count(self) -> Iterable:\n return self._g.V().count().toList()[0]", "def nodeCount(eval):\n if not isEvaluator(eval):\n return 0\n return eval.ReferencedNodes().Size()", "def node_count(self, *n_labels):\n if not n_labels:\n return len(self._nodes)\n elif len(n_labels) == 1:\n return len(self._nodes_by_label.get(n_labels[0], ()))\n else:\n return sum(1 for _ in self.nodes(*n_labels))", "def node_count(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"node_count\")", "def num_nodes(self) -> Optional[int]:\n return super().num_nodes", "def getNNodesTot(self):\n nNodesTot = 0\n for iElt in Elements._all:\n nNodesTot += len(iElt.coord)\n return nNodesTot", "def num_nodes(self):\n return ((len(self.tensor_u)+1) * (len(self.tensor_v)+1) *\n (len(self.tensor_w)+1))", "def num_nodes(self, ntype: str = None) -> int:\n if ntype:\n return self.num_nodes_dict[ntype]\n else:\n return self.total_number_of_nodes", "def total_nodes(self)->int:\n\t\tqueue=[]\n\t\tsum=0\n\t\tqueue.append(self)\n\t\twhile(len(queue)>0):\n\t\t\tnode=queue.pop(0)\n\t\t\tsum+=1\n\t\t\tif(node.right!=None):\n\t\t\t\tqueue.append(node.right)\n\t\t\tif(node.left!=None):\n\t\t\t\tqueue.append(node.left)\n\t\treturn sum", "def num_nodes(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"num_nodes\")", "def count(self):\n\n count = 0\n x = self.begin\n\n if self.begin == self.end == None:\n return 0\n\n elif self.begin == self.end:\n return 1\n\n else:\n while x:\n count += 1\n x = x.next\n\n return count", "def number_internal_links(self, node_list):\n n_links = self.internal_adjacency(node_list).sum()\n if self.directed:\n return n_links\n else:\n return n_links // 2", "def deep_len(lnk):\n \"*** YOUR CODE HERE ***\"\n if lnk == Link.empty:\n return 0\n elif type(lnk) is not Link:\n return 1\n else:\n return deep_len(lnk.first) + deep_len(lnk.rest)", "def size(self):\n count = 0\n current = self.front\n\n while current is not None:\n current = 
current.getPtr()\n count += 1\n\n return count", "def num_links(self):\n count=0.0\n for cluster in self.clusters:\n if self.clusters[cluster] == self.clusters[cluster].antecessor:\n numberofmembers=self.clusters[cluster].number_of_members\n count+=numberofmembers\n return count", "def num_nodes(self):\n if not self._tensors_defined():\n return None\n nodes = (\n (len(self.tensor_u) + 1)\n * (len(self.tensor_v) + 1)\n * (len(self.tensor_w) + 1)\n )\n return nodes", "def length(self):\n return self.list.length", "def length(self):\n return self.list.length", "def get_tree_size(cur):\n sql = \"\"\"\n SELECT\n COUNT(*)\n FROM\n nodes;\n \"\"\"\n cur.execute(sql)\n result = cur.fetchone()\n return result['count']", "def count(self):\n return len(self.__links)", "def head_count(self):\n return self._internal.get_head_count()" ]
[ "0.8711247", "0.8629149", "0.8546763", "0.8544969", "0.85332114", "0.8501063", "0.84799534", "0.84799534", "0.8410636", "0.8409078", "0.84006345", "0.83499175", "0.83401936", "0.83310866", "0.8329662", "0.8320223", "0.8318683", "0.82849824", "0.828266", "0.82563204", "0.8193696", "0.81728727", "0.81400955", "0.8103241", "0.8057471", "0.8057471", "0.80572844", "0.80438095", "0.8029507", "0.80194825", "0.80004", "0.797616", "0.7956126", "0.78846025", "0.78591174", "0.7841742", "0.783575", "0.78072286", "0.7788644", "0.7768658", "0.7767334", "0.7760861", "0.77043015", "0.7615907", "0.7613184", "0.76050645", "0.7593476", "0.758811", "0.7585791", "0.75485986", "0.75485986", "0.7527572", "0.7518882", "0.7518882", "0.7512456", "0.7512456", "0.7509023", "0.7508534", "0.7482977", "0.74813133", "0.7472731", "0.7428019", "0.73758054", "0.73721015", "0.73470736", "0.73458976", "0.73449314", "0.7329134", "0.73276776", "0.7277336", "0.7274363", "0.7269835", "0.7261785", "0.724739", "0.7242348", "0.7201132", "0.7183905", "0.71483517", "0.71454304", "0.7140382", "0.7136407", "0.71234274", "0.70920974", "0.7091928", "0.7051453", "0.7043309", "0.70386356", "0.7034663", "0.7029079", "0.7009795", "0.6996733", "0.69888335", "0.69592583", "0.6951535", "0.69434035", "0.6928463", "0.6918798", "0.6918798", "0.6893425", "0.68921006", "0.6877616" ]
0.0
-1
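The ranked document answers in O(1) by reading a maintained counter, while most of the negatives recount in O(n) by traversing the chain. A hedged sketch of both variants (class and attribute names such as next_node are assumptions, not taken from the record):

class Node:
    def __init__(self, data, next_node=None):
        self.data = data
        self.next_node = next_node

class LinkedList:
    def __init__(self):
        self.head = None
        self.size = 0  # kept accurate by every insert and remove

    def length(self):
        # O(1): the approach of the ranked document above.
        return self.size

    def length_by_traversal(self):
        # O(n): the approach most negatives take, walk and count.
        count = 0
        node = self.head
        while node is not None:
            count += 1
            node = node.next_node
        return count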
Determines if the linked list is empty
def is_empty(self):
    return self.size == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_empty(self):\n return self.linked_list.length() == 0", "def is_empty(self):\n\n if self.head == None:\n return True\n else:\n return False", "def is_empty(self):\n\n if self.head == None:\n return True\n else:\n return False", "def is_empty(self):\n if self.head == None:\n return True\n else:\n return False", "def is_empty(self):\n if self.head is None:\n return True\n else:\n return False", "def is_empty(self):\n return self.head is None", "def is_empty(self):\n # This is worse style: \"return self._head == None\"\n return self._head is None", "def is_empty(self):\n\n return self.head is None", "def isEmpty(self):\n if self.head.next == None:\n return True\n return False", "def empty(self):\n return self.head == None", "def empty(self):\n if self.head is None:\n return True\n return False", "def is_empty(self):\n current = self.head\n\n if current.next == self.tail:\n return True\n else:\n return False", "def is_empty(self):\n return self._head is self._tail is None", "def isEmpty(self):\n return self.head == None", "def empty(self) -> bool:\n return True if self.head is None else False", "def is_empty(self):\n return self.list.length == 0", "def is_empty(self):\n return self.list.length == 0", "def is_empty(self):\n return self.list_size == 0", "def is_empty(self):\n if len(self.list) == 0:\n return True\n return False", "def is_empty(self):\n # TODO: Check if empty\n return self.list == []", "def is_empty(self):\n return self.list.is_empty()", "def isEmpty(self):\n return self.head == None and self.tail == None", "def is_empty(self):\n # TODO: Check if empty\n return self.list.is_empty()", "def is_empty(self):\n return len(self.__nodes) == 0", "def is_empty(self) -> bool:\n return self.peek(1) == []", "def isEmpty(self):\n \"\"\"\n :type None\n :rtype Boolean\n \"\"\"\n return self.head == None", "def empty(self):\n if len(self.list_x) == 0:\n return True\n else:\n return False", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n\t\treturn (self._size == 0)", "def is_empty(self):\n\t\treturn self._size == 0", "def is_empty(self):\n return self.__size == 0", "def is_empty(self):\n return self.size() == 0", "def is_empty(self):\n return self.size() == 0", "def is_empty(self) -> bool:\r\n return self.size == 0", "def _is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.first is None", "def is_empty(self) -> bool:\n return self._items == []", "def is_empty(self):\n if self.size() == 0:\n return True\n else:\n return False", "def is_empty(self):\n return self._first is None", "def is_empty(self):\n return self._items == []", "def is_empty(self):\n return self.count == 0", "def is_empty(self):\n return self.count == 0", "def is_empty(self):\n\n if self.size() == 0:\n return True\n else:\n return False", "def is_empty(self):\n return self.size == []", "def is_empty(self):\n return self.items == []", "def is_empty(self) -> bool:\n return self._first is None", "def is_empty(self):\n return not self.size()", "def is_empty(self):\n length = len(self.items)\n if length != 0:\n return False\n else:\n return True", "def is_empty(self):\n return len(self.items) == 0", "def is_empty(self):\r\n\r\n return self._size == 0", "def is_empty(self) -> bool:\n if self.num_items == 0:\n return True\n else:\n return False", "def is_empty(self):\r\n if self.size == 0:\r\n return True\r\n return False", "def is_empty(self):\r\n return self._size == 0", "def is_empty(self):\r\n return self._size == 0", "def IsEmpty(self):\n\t\treturn self.first == None", "def 
is_list_empty(list):\n if not list:\n return True\n else:\n return False", "def is_empty(self):\n return self.n==0", "def its_empty(self) -> bool:\n return self.items == []", "def is empty(self):\n return len(self) == 0", "def is_empty(self) -> bool:\n return self._size == 0\n # return self._items == []\n # Note: self._items == [] is faster than len(self._items) == 0\n # in general. Why?\n # Think about what happens when len(self._items) is called\n # on a list of 100,000 items.", "def is_empty(self):\n return len(self._items) == 0", "def is_empty(self):\n return len(self._items) == 0", "def empty(self) -> bool:\r\n return len(self.items) == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self) -> bool:\n return len(self._items) == 0", "def is_empty(self):\n return self.__len__() == 0", "def is_empty(self):\n\n return self.items == []", "def is_empty(self):\n\n return self.items == []", "def empty(self) -> bool:", "def is_empty(self):\n if len(self.items) == 0:\n return True\n else:\n return False", "def is_empty(self):\n\n return self.count == 0", "def empty(self):\n return self.size() == 0", "def empty(self):\n if len(self.List_store) == 0:\n return True\n return False", "def is_empty(self):\r\n return len(self) == 0", "def empty(self) -> bool:\n return self.size == 0", "def is_empty(self):\n if self.front == None:\n return True\n else:\n return False", "def is_empty(self) -> bool:\n return self.heap.length() == 0", "def is_empty(self) -> bool:\n return self.heap.length() == 0", "def is_empty(self):\n\n if self.front == None:\n return True\n else:\n return False", "def is_empty(self):\n return len(self.top) == 0", "def is_Empty(self):\n return self.size == 0" ]
[ "0.89005077", "0.86401933", "0.86401933", "0.86349124", "0.85868573", "0.8544384", "0.85096204", "0.84913164", "0.8477131", "0.84724826", "0.8409223", "0.83905214", "0.8383425", "0.83583224", "0.8358173", "0.834816", "0.834816", "0.8316606", "0.8197637", "0.8186564", "0.8103742", "0.8074849", "0.80355036", "0.79258084", "0.7923385", "0.7904237", "0.7884335", "0.7808096", "0.7801842", "0.77939796", "0.7775719", "0.77596587", "0.77596587", "0.77457756", "0.7744983", "0.773301", "0.7724182", "0.772182", "0.77198166", "0.77129316", "0.76978344", "0.76978344", "0.76944596", "0.76936185", "0.7693065", "0.7685901", "0.76831686", "0.76646507", "0.7661675", "0.76551944", "0.76541716", "0.7648089", "0.7642203", "0.7642203", "0.7639517", "0.76364887", "0.7629087", "0.76264375", "0.7621468", "0.76184237", "0.7617055", "0.7617055", "0.76140463", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.76019263", "0.7601336", "0.75970054", "0.75966376", "0.75966376", "0.7595825", "0.7591809", "0.7578711", "0.7573902", "0.7565013", "0.75532144", "0.75402653", "0.75384873", "0.7534148", "0.7534148", "0.7532188", "0.7529327", "0.75293094" ]
0.7669198
50
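Checking a stored counter and checking self.head is None are equivalent as long as the counter is maintained correctly; the ranked document uses the former, most negatives the latter. A short usage sketch under that assumption:

class LinkedList:
    def __init__(self):
        self.head = None
        self.size = 0

    def is_empty(self):
        # Equivalent to `self.head is None` whenever size is kept accurate.
        return self.size == 0

ll = LinkedList()
assert ll.is_empty()  # a fresh list reports empty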
Gets the first value of the list
def front_value(self):
    if self.is_empty():
        return None
    return self.head.value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def first(items):\r\n return items[0]", "def first(xs):\n if not xs:\n return None\n return xs[0]", "def first(xs):\n if not xs:\n return None\n return xs[0]", "def peek_first(self):\n if self.is_empty(): raise RuntimeError(\"Empty list\")\n return self.head.data", "def first(l):\n return next(iter(l), None)", "def first_value(self):\n if not self.is_empty():\n return self.data[self.head]\n return None", "def first(self):\n if self.is_empty():\n raise Empty('list is empty')\n return self._head._element # front aligned with head of list", "def first(self):\n if self.is_empty():\n raise Empty('list is empty')\n return self._head._next._element # front aligned with head of list", "def get_first_item(checklist):\r\n return checklist['items'][0]", "def first(self):\n return self.head and self.head.value or None", "def getfirst(self, key, default=None):\n \n values = self.getlist(key)\n return values[0] if values else default", "def first(self):\n if self.is_empty():\n raise Empty(\"List is empty!\")\n return self._header._next._element", "def first(items):\n return next(iter(items or []), None)", "def first(x):\n try:\n x = x.to_series()\n except AttributeError:\n pass\n return list(x)[0]", "def getfirst(s):\n return s[0] if isinstance(s, list) else s", "def get_first_item(videos):\n\n return next(iter(videos or []), None)", "def get_first(self):\n return self.A[1][0] if self.n > 0 else None", "def hd(lst):\n return lst[0] if lst else None", "def first(collection):\n return next(iter(collection))", "def first(collection):\n return next(iter(collection))", "def first_value(self):\n return self._value", "def extract(l):\n if l is None: return None\n if len(l) > 1:\n raise ValueError('More than 1 Value')\n try:\n return l[0]\n except IndexError:\n return None", "def first(self):\n if self.is_empty():\n raise Empty('La cola está vacía')\n return self._head._element # frente alineado con la cabeza de la lista", "def firstElement(self):\n return self.top()", "def first(self):\n return self.__head", "def get_first_element(dataset):\n return dataset.first()", "def first(self):\r\n return self.__head", "def first(seq):\n return next(iter(seq))", "def get_value(_list, _index):\n # print(_list, len(_list))\n if _index >= len(_list):\n return None\n return _list[_index]", "def return_first(x):\r\n if x == []:\r\n return ''\r\n else:\r\n return x[0]", "def first(s):\n assert is_link(s), 'fist only applies to a linked list.'\n assert s != empty, 'empty linked list has no first element.'\n return s[0]", "def first(self):\n return self._reduce_for_stat_function(F.first, only_numeric=False)", "def get_minimum_value_from_list(self, list_):\r\n return min(list_)", "def first(self):\n if self.head is None:\n raise Exception(\"nothing in queue\")\n return self.head.value", "def get_first(self):\n raise NotImplementedError(\"get_first: You should have implemented this method!\")", "def first(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n return self._head._element # front aligned with head of list", "def first(pair):\n\treturn pair[0]", "def first(data, key):\n for i in data:\n if key(i):\n return i\n return None", "def first(s):\n assert is_link(s), 'first only applies to linked lists.'\n assert s != empty, 'empty linked list has no first element.'\n return s[0]", "def first_value(self):\n return self._start", "def first(s):\n assert is_link(s), \"first only applies to linked lists.\"\n assert s != empty, \"empty linked list has no first element.\"\n return s[0]", "def find_min(list):\n return 
find_value_at(list, -1)", "def first(self): #TODO\r\n result = []\r\n for x in self.first_lookup(self.initialsymbol):\r\n result += x.first()\r\n if len(result) == 1:\r\n return result[0]\r\n return Choice(result)", "def first(self):\n return _(self._[0])", "def first_value(self):\n return self.samples[0]", "def get_first(self, buf=None):\n err = _ffi.new(\"JError_t *\")\n if buf is None:\n buf = StringCache.acquire(self._max_len)\n p = _cjudy.JudySLFirst(self._array[0], buf, err)\n return self._cast(buf, err, p)", "def first(self):\n return self.deque[0]", "def get_first_item(cls):\n return cls.query.first()", "def first(s):\n assert is_link(s),\"first only applies ti linked lists.\"\n assert s != empty, \"empty linked list has no first element.\"\n return s[0]", "def peek(self):\n return self.list.head", "def take_first(info):\n return info[0]", "def first(self):\n try:\n return self.next()\n except StopIteration:\n return None", "def min(l):\n if l:\n s_list = sorted(l)\n return s_list[0]\n else:\n raise ValueError(\"list empty\")", "def peek(self):\n if self.is_empty():\n return None\n return self.list.head.data", "def _get_list_value(index, array):\r\n if len(array) == 0:\r\n return None\r\n elif index >= 0 and index < len(array):\r\n return array[index]\r\n return array[index % len(array)]", "def first(iterable: t.Iterable[T]) -> T:\n return next(iter(iterable))", "def first(self) -> Element:\n return typing.cast(Element, self[0])", "def first(self):\n\t\tif self.is_empty():\n\t\t\traise Empty('Queue is empty')\n\t\treturn self._head._element", "def first(self):\r\n if self.head == None: #check if first(head) node is empty\r\n return 'null' #if yes, then return null\r\n else: #if it is not empty\r\n return self.head.data #return the data of head node\r", "def first(self):\n if self.is_empty():\n raise Empty(\"Queue undeflow.\")\n return self._head._element", "def first_value(self):\n return self._waveforms[0].first_value", "def first(self):\r\n if self.is_empty():\r\n raise Empty(\"Queue is empty\")\r\n return self._head._element", "def get_first(self):\n if self.is_empty():\n raise self.NoSuchNodeException()\n\n return self.head.data", "def peek(self):\n if self.count() <= 0:\n raise ValueError('Cannot peek at value that does not exist')\n return self.items[1]", "def value_from_list(key, values, by_first=False):\n i, j = (1, 0,) if not by_first else (0, 1,)\n for elm in values:\n if elm[i] == key:\n return elm[j]\n return None", "def first(seq):\n try: # try iterator interface\n return seq.next()\n except AttributeError:\n pass\n try: # seq is no iterator, try indexed lookup\n return seq[0]\n except IndexError:\n pass\n raise TypeError(\n \"Argument to `first()` method needs to be iterator or sequence.\")", "def one(self):\n return next(iter(self), None)", "def peek(self):\n if self.is_empty():\n return None\n list_length = len(self.list) - 1\n return self.list[list_length]", "def min(self):\n return self.get_first()", "def head(self) -> object:\n if not self._head:\n raise EmptyListException(\"The list is empty.\")\n return self._head", "def head(array) -> T:\n return array[0]", "def first(self) -> Optional[T]:\n if len(self.entry_finder) == 0:\n return None\n for (_, _, (item,)) in self.priority_queue:\n if item is not None:\n return cast(T, item)\n return None", "def first(self):", "def first(self):\n if self.head:\n self.cursor = self.head\n return self.cursor\n return None", "def first(self):\n return self.begin and self.begin.value or None", "def take_min(self):\n return 
self.get_first()", "def peek(self):\n\n if self.is_empty():\n return None\n\n return self._list[-1]", "def first(self):\n if self.is_empty():\n raise Empty(\"Queue is empty.\")\n head = self._tail._next\n return head._element", "def first_value(self):\n if hasattr(self, '_m_first_value'):\n return self._m_first_value if hasattr(self, '_m_first_value') else None\n\n self._m_first_value = self.first_value_raw.value\n return self._m_first_value if hasattr(self, '_m_first_value') else None", "def first_value(self):\n if hasattr(self, '_m_first_value'):\n return self._m_first_value if hasattr(self, '_m_first_value') else None\n\n self._m_first_value = self.first_value_raw.value\n return self._m_first_value if hasattr(self, '_m_first_value') else None", "def first(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n return self._data[self._front]", "def peek_front(self):\n\n if self.items:\n return self.items[0]\n return None", "def return_first_item(func):\n\n # Define the wrapper function.\n def wrapper(self, *args, **kwargs):\n\n # Execute the decorated method with the provided arguments.\n result = func(self, *args, **kwargs)\n\n # If the function returned a result and that result is a list then\n # return the first item on that list.\n if result and isinstance(result, list):\n result = result[0]\n\n return result\n\n return wrapper", "def first_value(self):\n return 0", "def first(self):\n try:\n data = self.get_cursor()[0]\n return self.from_(**self.prepare_data(data))\n except IndexError:\n return None", "def _first(self, \n iterable, \n condition=lambda x: True):\n try:\n return next(x for x in iterable if condition(x))\n except:\n return None", "def _tryGet(self, list, index, default):\n\t\tif (list and (len(list) > index)):\n\t\t\treturn list[index]\n\t\telif True:\n\t\t\treturn None", "def first(l: iter, predicate):\n for ele in l:\n if predicate(ele):\n return ele\n raise RuntimeError(\"Found nothing to match predicate\")", "def peek(self):\n pop = self.list_x[0]\n return pop", "def peek(self):\n return self.list.head.data", "def get_first(self):\n for u in self.user_order:\n if self.user_skip[u] == 0:\n return self.user_queue[u][0].obj\n return None", "def first(sequence, default=Ellipsis):\n if default is Ellipsis:\n return next(iter(sequence))\n else:\n return next(iter(sequence), default)", "def first(self, trace):\n return trace[0]", "def find_first(item, vec):\n @jit # Numba jit uses C-compiled version of the code in this function\n def find_first_iter(item,vec):\n for v in range(len(vec)):\n for i in item:\n if i == vec[v]:\n return v\n\n @jit\n def find_first_sing(item,vec):\n for v in range(len(vec)):\n if item == vec[v]:\n return v\n\n\n if isinstance(item,(tuple,list)):\n return find_first_iter(item,vec)\n else:\n return find_first_sing(item,vec)", "def GetFirstVisibleItem(self):\r\n\r\n return self.GetNextVisible(self.GetRootItem())", "def first(self):\n if self.ordered:\n queryset = self\n else:\n self._check_ordering_first_last_queryset_aggregation(method=\"first\")\n queryset = self.order_by(\"pk\")\n for obj in queryset[:1]:\n return obj", "def getFirst(self, t):\n index = self._findFirst(t)\n if index >= 0:\n return self.jobs[index]\n else:\n return None", "def _get_first_element(cls, d):\n\n t = np.where(d[:, 2] > 0)[0]\n if len(t):\n return d[t[0], 0], d[t[0], 1], t[0]\n return None, None, None", "def getFirstData(self) -> ghidra.program.model.listing.Data:\n ...", "def _first_raw_value(self, *fields):\n aggregate = self._first_raw_aggregate(*fields)\n if 
aggregate:\n return aggregate[0]" ]
[ "0.8075285", "0.7880562", "0.7880562", "0.78380233", "0.7835295", "0.7812245", "0.7767049", "0.7702108", "0.76995987", "0.7623594", "0.7603262", "0.7571921", "0.7567571", "0.730875", "0.7265243", "0.72589153", "0.7241536", "0.720047", "0.7198273", "0.7198273", "0.7179391", "0.70894885", "0.70819867", "0.7078718", "0.70392454", "0.7030367", "0.7001408", "0.69413996", "0.69403434", "0.6934092", "0.68990195", "0.68857986", "0.68790203", "0.68753874", "0.6872925", "0.6856442", "0.68425745", "0.68342584", "0.68044364", "0.68026924", "0.67930776", "0.67771095", "0.6763971", "0.6756948", "0.67537236", "0.6746962", "0.67120814", "0.6687662", "0.66826123", "0.6676636", "0.66739374", "0.6634843", "0.6624793", "0.66145694", "0.6610472", "0.6607261", "0.6599028", "0.65935177", "0.6579268", "0.6566717", "0.65630573", "0.65438944", "0.65406567", "0.6532618", "0.6513074", "0.6512399", "0.6485873", "0.647483", "0.64739203", "0.6450833", "0.6445192", "0.643535", "0.6419954", "0.6406943", "0.64036715", "0.64006376", "0.63840467", "0.63793516", "0.6374518", "0.6374518", "0.63640237", "0.6360441", "0.63577235", "0.6346643", "0.6346585", "0.6339553", "0.63390124", "0.63237816", "0.6309991", "0.6298814", "0.6275145", "0.6273799", "0.62461036", "0.6244815", "0.62444806", "0.6242345", "0.62384826", "0.62344414", "0.6217228", "0.6214114" ]
0.6397124
76
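Returning None for an empty list is a design choice: many of the negatives raise an Empty exception instead of returning a sentinel. A minimal runnable sketch of the document's convention (the Node and LinkedList definitions are assumptions):

class Node:
    def __init__(self, value, next_node=None):
        self.value = value
        self.next_node = next_node

class LinkedList:
    def __init__(self):
        self.head = None

    def is_empty(self):
        return self.head is None

    def front_value(self):
        # Peek at the head without removing it; None signals an empty list.
        if self.is_empty():
            return None
        return self.head.value

ll = LinkedList()
assert ll.front_value() is None
ll.head = Node(42)
assert ll.front_value() == 42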
Adds a node to the front of the list with value 'val'
def push_front(self, val):
    new_node = Node(val, self.head)
    if self.is_empty():
        self.tail = new_node
    self.head = new_node
    self.size += 1
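A usage sketch of this push_front, assuming the two-argument Node(val, next) constructor the document implies; the Node and LinkedList scaffolding here is hypothetical, not part of the record:

class Node:
    def __init__(self, value, next_node=None):
        self.value = value
        self.next_node = next_node

class LinkedList:
    def __init__(self):
        self.head = None
        self.tail = None
        self.size = 0

    def is_empty(self):
        return self.size == 0

    def push_front(self, val):
        new_node = Node(val, self.head)
        if self.is_empty():
            self.tail = new_node  # the first node is both head and tail
        self.head = new_node
        self.size += 1

ll = LinkedList()
ll.push_front('b')
ll.push_front('a')  # 'a' becomes the new head; 'b' stays the tail
assert ll.head.value == 'a' and ll.tail.value == 'b' and ll.size == 2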
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def push_front(self, val: Generic[T]) -> None:\n first_node = self.node.next\n\n self.node.next = Node(val)\n latest_first = self.node.next\n\n latest_first.prev = self.node #pushes the node to the front\n latest_first.next = first_node\n first_node.prev = latest_first #rearranges the list", "def addAtHead(self, val):\n tmp = Node(val)\n tmp.nxt = self.head\n self.head = tmp\n if not self.tail:\n self.tail = self.head", "def addAtHead(self, val):\n node = ListNode(val)\n if self.head == None:\n self.head = node\n else:\n node.next = self.head\n self.head = node", "def push(self, val):\n self.head = Node(val, self.head)", "def addAtHead(self, val: int) -> None:\n if(self.head == None):\n self.head = Node(val)\n else:\n new_node = Node(val)\n new_node.next = self.head \n self.head = new_node", "def addAtHead(self, val: int) -> None:\n if self.head:\n temp_node = MyListNode(val, next_node=self.head, prev_node=None)\n self.head.prev = temp_node\n self.head = temp_node\n self.node_count += 1\n else:\n self.head = MyListNode(val)\n self.node_count += 1", "def addNode(self, val):\n\t\tnode = self.createNode(val)\n\t\tif self.head is None:\n\t\t\tself.head = node\n\t\t\treturn node\n\t\tcur = self.head\n\t\twhile cur.getNext() is not None:\n\t\t\tcur = cur.getNext()\n\t\tcur.setNext(node)\n\t\treturn node", "def addAtHead(self, val):\n new_node = ListNode(val)\n new_node.next = self.head\n self.head = new_node\n self.length += 1", "def addAtHead(self, val):\n cur = linkNode(val)\n cur.next = self.head.next\n cur.prev = self.head\n\n self.head.next = cur\n if cur.next:\n cur.next.prev = cur\n\n if cur.next == None: # first node\n self.tail = cur\n # self.printList()", "def addAtHead(self, val: int) -> None:\n pred, succ = self.head, self.head.next\n cur = Node(val)\n cur.next = succ\n cur.prev = pred\n pred.next = cur\n succ.prev = cur\n self.size += 1\n # print(\"addHead\", self.head.next.val)", "def addAtHead(self, val):\n node = Node(val)\n node.next = self.head\n self.head = node\n\n self.size += 1", "def addAtHead(self, val):\n node = Node(val)\n node.next = self.head\n self.head = node\n\n self.size += 1", "def addAtHead(self, val):\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node\n self.length += 1", "def addAtHead(self, val: int) -> None:\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node", "def push(self, val):\n node = Node(val)\n node.next_node = self.head\n self.head = node", "def push(self, val):\n new_head = Node(val, self.head)\n self.head = new_head\n self._counter += 1", "def addAtHead(self, val):\n new_head = Node(val)\n if self._size == 0:\n self._head = new_head\n self._tail = self._head\n else:\n new_head.next = self._head\n self._head = new_head\n self._size += 1", "def insert(self, val):\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node", "def addAtHead(self, val):\n node = ListNode(val)\n node.next = self.head.next\n self.head.next = node\n if self.head is self.tail:\n self.tail = node\n self.len += 1", "def add_node(self, val):\n if val not in self:\n self.setdefault(val, [])", "def add_first(self, value):\n self.head = Node(value, self.head)", "def add_front(self, key, value):\r\n\t\tnew_node = SLNode(key, value)\r\n\t\tnew_node.next = self.head\r\n\t\tself.head = new_node\r\n\t\tself.size = self.size + 1", "def add_front(self, key, value):\n new_node = SLNode(key, value)\n new_node.next = self.head\n self.head = new_node\n self.size = self.size + 1", "def add_front(self, key, value):\n new_node = 
SLNode(key, value)\n new_node.next = self.head\n self.head = new_node\n self.size = self.size + 1", "def push_front(self, value):\n new_node = self.Node(value)\n\n # Edge Case : List is empty\n if self._size == 0:\n self._tail = new_node\n self._head = new_node\n self._size += 1\n return\n\n new_node.next = self._head\n self._head.prev = new_node\n self._head = new_node\n self._size += 1", "def addAtTail(self, val: int) -> None:\n if(self.head == None):\n self.head = Node(val)\n else:\n cur = self.head \n while cur.next != None:\n cur = cur.next \n\n cur.next = Node(val)", "def addAtHead(self, val):\n self.nums.insert(0, val)", "def addAtTail(self, val: int) -> None:\n new_node = Node(val)\n temp = self.head\n if self.head is None:\n self.head = new_node\n while temp.next:\n temp = temp.next\n temp.next = new_node", "def push_front(self, value):\n node = DLLNode(value)\n if self.head is None:\n self.tail = node \n else: \n self.head.prev_node = node \n node.next_node = self.head\n self.head = node", "def addAtTail(self, val: int) -> None:\n pred, succ = self.tail.prev, self.tail\n cur = Node(val)\n cur.next = succ\n cur.prev = pred\n pred.next = cur\n succ.prev = cur\n self.size += 1\n # print(\"addAtTail\", self.tail.prev.val)", "def push(self, val):\n self.head = Node(val, self.head)\n self._length += 1", "def push_back(self, val):\n new_node = Node(val)\n # Update current head and tail, if necessary\n if self.is_empty():\n self.head = new_node\n else:\n self.tail.next_node = new_node\n # new_node is now the tail\n self.tail = new_node\n self.size += 1", "def push(self, val):\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node\n self.length += 1", "def push(self, val: str) -> None:\n if self.head is None:\n self.head = Node(val)\n self.tail = self.head\n else:\n node = Node(val)\n self.tail.next = node\n self.tail = node", "def addAtTail(self, val):\n node = ListNode(val)\n if self.head == None:\n self.head = node\n else:\n cur = self.head\n while cur.next != None:\n cur = cur.next\n cur.next = node", "def add_to_head(self, value):\n node = Node(value)\n if self.head is not None:\n node.set_next(self.head)\n\n self.head = node", "def addAtTail(self, val):\n tmp = Node(val)\n if self.tail:\n self.tail.nxt = tmp\n k = self.tail\n self.tail = tmp\n else:\n self.head = tmp\n self.tail = tmp\n k = tmp", "def prepend(self, value):\r\n if self.head is None:\r\n self.head = Node(value)\r\n return\r\n new_node = Node(value)\r\n new_node.next = self.head\r\n self.head = new_node", "def addAtHead(self, val: int) -> None:\n self.addAtIndex(0, val)", "def addAtHead(self, val: int) -> None:\n self.addAtIndex(0, val)", "def push(self, val):\n self._linkedlist.push(val)\n self._update_attr()", "def add_node(self, val):\n if val in self._g:\n raise ValueError('Node already exists.')\n self._g[val] = []", "def push(self, val):\n try:\n node = Node(val, self.top)\n except TypeError:\n return self.top\n self.top = node\n self._size += 1\n return self.top", "def add_node(self, val):\n if val not in self:\n self.setdefault(val, {})", "def insert(self, val):\n inserted_node = DblNode(val, self.head)\n if not self.head:\n self.head = inserted_node\n self.tail = self.head\n self.head.previous_node = inserted_node\n self.head = inserted_node", "def addAtTail(self, val):\n if self.head is None:\n self.addAtHead(val)\n else:\n new_node = Node(val)\n curr = self.head\n while (curr.next is not None):\n curr = curr.next\n\n curr.next = new_node\n new_node.prev = curr\n self.length += 1", "def 
insert(self, value):\n self.head = Node(value, self.head)", "def addAtTail(self, val):\n curr = self.head\n while curr.next:\n curr = curr.next\n new_node = ListNode(val)\n curr.next = new_node\n self.length += 1", "def push_back(self, val: Generic[T]) -> None:\n last_node = self.node.prev\n self.node.prev = Node(val) #pushes the node to the back\n latest_first = self.node.prev\n\n latest_first.next = self.node #rearranges the list\n latest_first.prev = last_node\n last_node.next = latest_first", "def insert(self, value):\n old_head = self.head\n self.head = Node(value, old_head)\n if self.count > 0: # if any Nodes: set tail previous to current Node\n old_head.next = self.head\n else: # adding to an empty, than define front\n self.tail = self.head\n self.count += 1", "def addAtTail(self, val):\n cur = linkNode(val)\n if self.tail == None: # first node\n self.head.next = cur\n cur.prev = self.head\n self.tail = cur\n else:\n self.tail.next = cur\n cur.prev = self.tail\n self.tail = cur # update tail\n # self.printList()", "def addAtTail(self, val):\n curr = self.head\n if curr is None:\n self.head = Node(val)\n else:\n while curr.next is not None:\n curr = curr.next\n curr.next = Node(val)\n\n self.size += 1", "def addAtTail(self, val):\n curr = self.head\n if curr is None:\n self.head = Node(val)\n else:\n while curr.next is not None:\n curr = curr.next\n curr.next = Node(val)\n\n self.size += 1", "def add_to_head(self, value):\n\n new_node = ListNode(value)\n if self.size == 0:\n self.head = new_node\n self.tail = new_node\n\n else:\n new_node.next = self.head\n self.head.prev = new_node\n new_node.next = self.head\n self.head = new_node\n\n # increments the size attribute after adding node to list\n self.size += 1", "def append(self, val):\n inserted_node = DblNode(val, previous_node=self.tail)\n self.tail.next_node = inserted_node\n self.tail = inserted_node", "def addAtTail(self, val: int) -> None:\n tmp = ListNode(val)\n \n tmp1 = self.dummy\n while tmp1 and tmp1.next:\n tmp1 = tmp1.next\n tmp1.next = tmp\n tmp.pre = tmp1\n self.cnt += 1", "def enqueue(self, val):\n\n if self.front is None:\n self.back = self.front = Node(val)\n self._size += 1\n else:\n self.back._next = self.back = Node(val)\n self._size += 1\n return self.back", "def addAtIndex(self, index: int, val: int) -> None:\n if(index == 0):\n new_node = Node(val)\n new_node.next = self.head \n self.head = new_node \n else:\n cnt = 0\n cur = self.head\n prev = None \n\n while cur != None:\n if(cnt == index):\n new_node = Node(val)\n new_node.next = cur \n prev.next = new_node\n return \n else:\n prev = cur \n cur = cur.next\n cnt += 1\n \n if(cnt == index):\n cur = Node(val)\n prev.next = cur", "def __add_first(self, value):\n node = self.Node(value, self.__head)\n if self.__head == None: # when this is the first element being added,\n self.__last = node # set the last pointer to this new node\n self.__head = node\n self.__length += 1", "def append(self, value):\n old_tail = self.tail\n self.tail = Node(value, None, old_tail)\n if self.count > 0: # if any Nodes: set tail previous to current Node\n old_tail.previous = self.tail\n else: # adding to an empty, than define front\n self.head = self.tail\n self.count += 1", "def push(self, value):\r\n new_node = Node(value)\r\n print('pushing value : ' + str(value))\r\n # print (self.head, new_node.get_value())\r\n if self.head:\r\n # tmp_node = self.head\r\n new_node.set_next(self.head) # set new node's next = head node's next\r\n self.head = new_node # assign the new node as the 
Head node\r\n else:\r\n self.head = new_node", "def push(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def push(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def push(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def add(self, val):\n if val < self.val:\n if self.left:\n self.left.add(val)\n else:\n self.left = Tree1(val)\n elif val > self.val:\n if self.right:\n self.right.add(val)\n else:\n self.right = Tree1(val)", "def append(self, value):\n new_node = Node(value)\n if self.head:\n node = self.head\n while node.next != self.head:\n node = node.next\n node.next = new_node\n else:\n self.head = new_node\n new_node.next = self.head", "def addAtIndex(self, index, val):\n if index < 0:\n new_node = ListNode(val)\n new_node.next = self.head\n self.head = new_node\n\n if index <= self.length:\n prev = None\n curr = self.head\n for i in range(1, index + 1):\n prev = curr\n curr = curr.next\n new_node = ListNode(val)\n if prev:\n prev.next = new_node\n else:\n self.head = new_node\n new_node.next = curr\n self.length += 1", "def addAtIndex(self, index: int, val: int) -> None:\n # If index is greater than the length, \n # the node will not be inserted.\n if index > self.size:\n return\n \n # [so weird] If index is negative, \n # the node will be inserted at the head of the list.\n if index < 0:\n index = 0\n \n # find predecessor and successor of the node to be added\n if index < self.size - index:\n pred = self.head\n for _ in range(index):\n pred = pred.next\n succ = pred.next\n else:\n succ = self.tail\n for _ in range(self.size - index):\n succ = succ.prev\n pred = succ.prev\n \n # insertion itself\n self.size += 1\n to_add = ListNode(val)\n to_add.prev = pred\n to_add.next = succ\n pred.next = to_add\n succ.prev = to_add", "def addAtIndex(self, index, val):\n if index > 0 and not self.head:\n return\n \n tmp = Node(val)\n if index == 0 and not self.head:\n self.head = tmp\n self.tail = self.head\n return\n if index == 0 and self.head:\n tmp.nxt = self.head\n self.head = tmp \n return\n \n \n cur = self.head\n i = 1\n while i < index and cur:\n cur = cur.nxt\n i+=1\n if i == index:\n if not cur:\n if self.tail:\n self.tail.nxt = tmp\n self.tail = tmp\n else:\n self.head = tmp\n self.tail = tmp\n# print(\"KMG 1\")\n else:\n# print(\"inserting after the value %d\" %cur.val)\n tmp.nxt = cur.nxt\n cur.nxt = tmp\n if self.tail == cur:\n self.tail = tmp", "def append(self, value):\n if not self.head:\n self.head = Node(value)\n return\n link = self.head\n while link.next_value:\n link = link.next_value\n link.next_value = Node(value)\n return", "def addAtIndex(self, index, val):\n if index < 0:\n return -1\n\n p = self.head\n while index and p: # 0-index before index-th\n p = p.next\n index -= 1\n\n if p == None:\n return\n cur = linkNode(val)\n cur.next = p.next\n cur.prev = p\n if p.next:\n p.next.prev = cur\n p.next = cur\n if cur.next == None: # tail\n self.tail = cur\n # self.printList()", "def addAtTail(self, val):\n node = ListNode(val)\n self.tail.next = node\n self.tail = node\n self.len += 1", "def __addToLevel(self, head, value):\n\n #if DEBUG: print('\\t__addToLevel({})'.format(value))\n\n cur = head\n \n if cur.next == None:\n output = self.__insert(cur,value)\n return output\n \n #cur = cur.next\n\n while cur:\n if cur.next == None or \\\n cur.val == value or\\\n cur.next.val > value:\n output = self.__insert(cur,value)\n #output = cur\n break\n cur = cur.next\n return output", "def 
append(self, value):\r\n\r\n if self.head is None:\r\n self.head = Node(value)\r\n return\r\n node = self.head\r\n while node.next:\r\n node = node.next\r\n node.next = Node(value)", "def push(self, val):\n self.insert(val)", "def append(self, value):\n if self.head is None:\n self.head = Node(value)\n return\n node = self.head\n while node.next:\n node = node.next\n node.next = Node(value)", "def add_sorted(self, val):\n if self.root is None:\n self.root = TreeNode(val)\n else:\n self._add_sorted(val, self.root)", "def add(self, value):\n # Find the tail\n tail = self.head\n while tail and tail.next:\n tail = tail.next\n\n if tail:\n # Add a new node with the value\n tail.next = Node(value, tail, None)\n else:\n # Add first node to the list\n self.head = Node(value, None, None)", "def addAtTail(self, val):\n new_tail = Node(val)\n if self._size == 0:\n self._head = new_head\n self._tail = self._head\n else:\n self._tail.next = new_tail\n self._tail = new_tail\n self._size += 1", "def insert(self, value, pos):\r\n\r\n if self.head is None:\r\n self.head = Node(value)\r\n return\r\n\r\n if pos == 0:\r\n self.prepend(value)\r\n return\r\n\r\n index = 0\r\n node = self.head\r\n while node.next and index <= pos:\r\n if (pos - 1) == index:\r\n new_node = Node(value)\r\n new_node.next = node.next\r\n node.next = new_node\r\n return\r\n\r\n index += 1\r\n node = node.next\r\n else:\r\n self.append(value)", "def append(self, value):\n current = self.head\n\n while current:\n if current.next == None:\n current.next = Node(value)\n break\n current = current.next", "def add(self, val):\n if val <= self.value:\n if self.left: \n ''' if left child created just add the value'''\n self.left.add(val)\n else:\n self.left = BinaryNode(val)\n else:\n if self.right:\n self.right.add(val)\n else:\n self.right = BinaryNode(val)", "def append(self, value):\n node = SLLNode(value)\n if self.head is None: \n self.head = node \n else: \n tail_node = self.head\n while tail_node.next_node is not None: \n tail_node = tail_node.next_node\n tail_node.next_node = node", "def push(self, value):\n new_node = Node(value)\n new_node.next = self.head\n self.head = new_node\n self.count += 1\n return new_node", "def push_front(self, item):\n new_node = Node(item)\n # if the list is empty, make it head\n if self.head is None:\n self.head = new_node\n # else, \n else:\n new_node.next = self.head # new node points to current head\n self.head = new_node # current head points to new_node\n self.n += 1", "def insert_before(self, key, value):\n # Iterating to node that has value\n node = self.head\n last_node = None\n while node is not None and node.value != key:\n last_node = node\n node = node.next_\n\n # Check if the node has been found\n if node is None:\n return\n\n # Checking whether head matched\n if last_node is None:\n self.add_first(value)\n return\n\n # Inserting new node\n last_node.next_ = Node(value, node)", "def addAtIndex(self, index: int, val: int) -> None:\n new_node = Node(val)\n curr = self.head\n if index < 1:\n self.head = new_node\n return\n count = 0\n prev = self.head\n while curr:\n count += 1\n if count == index:\n new_node.next = prev.next\n prev.next = new_node\n break\n prev = curr.next\n curr = curr.next\n \n if count == index:\n curr.next = new_node\n else:\n return -1", "def addAtIndex(self, index, val):\n if index == 0:\n self.addAtHead(val)\n\n cur, node = self.head, ListNode(val)\n i = 1\n while cur and i != index:\n cur = cur.next\n i += 1\n if cur:\n nxt = cur.next\n cur.next = node\n cur = 
cur.next\n cur.next = nxt", "def appendleft(self, val):\n self._values.append(val)", "def enqueue(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def enqueue(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def insert(self, value):\n node = Node(value)\n\n if self.head is not None:\n node.next = self.head\n self.head = node", "def addAtTail(self, val: int) -> None:\n '''node = self.head\n\n if node:\n while True:\n if node.next:\n node = node.next\n else:\n node.next = MyListNode(val,next_node=None,prev_node=node)\n break\n else:\n self.head = MyListNode(val)'''\n\n if self.node_count > 0:\n node = self.get_node(self.node_count - 1)\n #print(\"Last \")\n temp_node = MyListNode(val, next_node=None, prev_node=node)\n node.next = temp_node\n self.node_count += 1\n else:\n self.head = MyListNode(val)\n self.node_count += 1", "def push(self, value):\r\n new_node = Node(value)\r\n print('pushing value to the Tail : ' + str(value))\r\n if self.head:\r\n node = self.head\r\n while node.get_next():\r\n node = node.get_next()\r\n node.set_next(new_node)\r\n else:\r\n self.head = new_node", "def addAtIndex(self, index: int, val: int) -> None:\n if self.size < index:\n return\n if index < 0:\n return\n\n new_node = Node(val)\n curr = self.head\n for _ in range(index):\n curr = curr.next\n new_node.next = curr.next\n curr.next = new_node\n self.size += 1", "def addNodeBefore(self, new_value, before_node): # Class O(n)\r\n if not isinstance(new_value, Node):\r\n if new_value % 1 != 0: raise ValueError(\"Please, insert an integer\")\r\n if before_node > self.length(): raise ValueError(\"Invalid position\")\r\n if before_node == 1:\r\n self.head = Node(new_value, self.head)\r\n else:\r\n self.addNodeAfter(new_value, before_node - 1)", "def insert(self, val):\n if self.val is None:\n self.__init__(val)\n elif self.val > val:\n self.left.insert(val)\n elif self.val < val:\n self.right.insert(val)", "def addAtIndex(self, index, val):\n if index < 0 or index > self.size:\n return\n\n if index == 0:\n self.addAtHead(val)\n else:\n curr = self.head\n for i in range(index - 1):\n curr = curr.next\n node = Node(val)\n node.next = curr.next\n curr.next = node\n\n self.size += 1", "def addAtIndex(self, index, val):\n if index < 0 or index > self.size:\n return\n\n if index == 0:\n self.addAtHead(val)\n else:\n curr = self.head\n for i in range(index - 1):\n curr = curr.next\n node = Node(val)\n node.next = curr.next\n curr.next = node\n\n self.size += 1", "def push(self, key, val):\n # create new node and add to data\n new_ele = Node(key, val)\n self._data.append(new_ele)\n # percolate number into correct place\n self.percolate_up(len(self)-1)" ]
[ "0.80221725", "0.7826861", "0.7751701", "0.77292", "0.7605654", "0.75942093", "0.75738394", "0.755168", "0.7545137", "0.7532508", "0.75297654", "0.75297654", "0.75270396", "0.75006205", "0.7481478", "0.7417329", "0.7411148", "0.7410026", "0.7406186", "0.74055594", "0.738027", "0.7368777", "0.7326956", "0.7326956", "0.73056793", "0.72869956", "0.7285683", "0.72437495", "0.7238271", "0.72258633", "0.72082925", "0.7194072", "0.7156052", "0.7127973", "0.7125612", "0.7119841", "0.7116917", "0.710315", "0.70905876", "0.70905876", "0.7026285", "0.7020351", "0.70091224", "0.7001758", "0.6992369", "0.69909734", "0.6967518", "0.69501805", "0.6948056", "0.6944519", "0.6918779", "0.6911195", "0.6911195", "0.6874471", "0.6817689", "0.68162614", "0.6815436", "0.6790261", "0.67741376", "0.67580223", "0.67555875", "0.6755401", "0.6755401", "0.6755401", "0.67537034", "0.67351806", "0.6730285", "0.6721844", "0.67199045", "0.6701729", "0.66978383", "0.6695864", "0.66890365", "0.6685381", "0.66765255", "0.66721904", "0.6656623", "0.66538763", "0.6653085", "0.66460204", "0.66337204", "0.66162497", "0.6592746", "0.65915823", "0.65856314", "0.65780914", "0.6577541", "0.6574639", "0.65706015", "0.6570157", "0.6570157", "0.6555976", "0.6555213", "0.6545751", "0.65439385", "0.65404147", "0.65383965", "0.6535129", "0.6535129", "0.65240395" ]
0.7917012
1
Adds a node to the back of the list with value 'val'
def push_back(self, val):
    new_node = Node(val)

    # Update current head and tail, if necessary
    if self.is_empty():
        self.head = new_node
    else:
        self.tail.next_node = new_node

    # new_node is now the tail
    self.tail = new_node
    self.size += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def push_back(self, val: Generic[T]) -> None:\n last_node = self.node.prev\n self.node.prev = Node(val) #pushes the node to the back\n latest_first = self.node.prev\n\n latest_first.next = self.node #rearranges the list\n latest_first.prev = last_node\n last_node.next = latest_first", "def push(self, val):\n self.head = Node(val, self.head)", "def push(self, val):\n node = Node(val)\n node.next_node = self.head\n self.head = node", "def addAtTail(self, val: int) -> None:\n pred, succ = self.tail.prev, self.tail\n cur = Node(val)\n cur.next = succ\n cur.prev = pred\n pred.next = cur\n succ.prev = cur\n self.size += 1\n # print(\"addAtTail\", self.tail.prev.val)", "def addAtTail(self, val: int) -> None:\n new_node = Node(val)\n temp = self.head\n if self.head is None:\n self.head = new_node\n while temp.next:\n temp = temp.next\n temp.next = new_node", "def addAtTail(self, val: int) -> None:\n if(self.head == None):\n self.head = Node(val)\n else:\n cur = self.head \n while cur.next != None:\n cur = cur.next \n\n cur.next = Node(val)", "def push(self, val):\n new_head = Node(val, self.head)\n self.head = new_head\n self._counter += 1", "def addAtTail(self, val):\n tmp = Node(val)\n if self.tail:\n self.tail.nxt = tmp\n k = self.tail\n self.tail = tmp\n else:\n self.head = tmp\n self.tail = tmp\n k = tmp", "def append(self, val):\n inserted_node = DblNode(val, previous_node=self.tail)\n self.tail.next_node = inserted_node\n self.tail = inserted_node", "def add_node(self, val):\n if val in self._g:\n raise ValueError('Node already exists.')\n self._g[val] = []", "def add_node(self, val):\n if val not in self:\n self.setdefault(val, [])", "def push_back(self, value):\n\n # Edge Case : List is empty\n # Behave just like push_front()\n if self._size == 0:\n self.push_front(value)\n return\n\n new_node = self.Node(value)\n new_node.prev = self._tail\n self._tail.next = new_node\n self._tail = new_node\n self._size += 1", "def push(self, val):\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node\n self.length += 1", "def push(self, val):\n self._linkedlist.push(val)\n self._update_attr()", "def push(self, val: str) -> None:\n if self.head is None:\n self.head = Node(val)\n self.tail = self.head\n else:\n node = Node(val)\n self.tail.next = node\n self.tail = node", "def push(self, val):\n try:\n node = Node(val, self.top)\n except TypeError:\n return self.top\n self.top = node\n self._size += 1\n return self.top", "def addAtTail(self, val):\n if self.head is None:\n self.addAtHead(val)\n else:\n new_node = Node(val)\n curr = self.head\n while (curr.next is not None):\n curr = curr.next\n\n curr.next = new_node\n new_node.prev = curr\n self.length += 1", "def push(self, val):\n self.head = Node(val, self.head)\n self._length += 1", "def addAtTail(self, val):\n cur = linkNode(val)\n if self.tail == None: # first node\n self.head.next = cur\n cur.prev = self.head\n self.tail = cur\n else:\n self.tail.next = cur\n cur.prev = self.tail\n self.tail = cur # update tail\n # self.printList()", "def addAtTail(self, val):\n node = ListNode(val)\n if self.head == None:\n self.head = node\n else:\n cur = self.head\n while cur.next != None:\n cur = cur.next\n cur.next = node", "def addAtTail(self, val):\n curr = self.head\n if curr is None:\n self.head = Node(val)\n else:\n while curr.next is not None:\n curr = curr.next\n curr.next = Node(val)\n\n self.size += 1", "def addAtTail(self, val):\n curr = self.head\n if curr is None:\n self.head = Node(val)\n else:\n while curr.next is not 
None:\n curr = curr.next\n curr.next = Node(val)\n\n self.size += 1", "def addNode(self, val):\n\t\tnode = self.createNode(val)\n\t\tif self.head is None:\n\t\t\tself.head = node\n\t\t\treturn node\n\t\tcur = self.head\n\t\twhile cur.getNext() is not None:\n\t\t\tcur = cur.getNext()\n\t\tcur.setNext(node)\n\t\treturn node", "def addAtTail(self, val):\n new_tail = Node(val)\n if self._size == 0:\n self._head = new_head\n self._tail = self._head\n else:\n self._tail.next = new_tail\n self._tail = new_tail\n self._size += 1", "def addAtTail(self, val):\n curr = self.head\n while curr.next:\n curr = curr.next\n new_node = ListNode(val)\n curr.next = new_node\n self.length += 1", "def push_front(self, val):\n new_node = Node(val, self.head)\n if self.is_empty():\n self.tail = new_node\n self.head = new_node\n self.size += 1", "def addAtTail(self, val):\n node = ListNode(val)\n self.tail.next = node\n self.tail = node\n self.len += 1", "def push_front(self, val: Generic[T]) -> None:\n first_node = self.node.next\n\n self.node.next = Node(val)\n latest_first = self.node.next\n\n latest_first.prev = self.node #pushes the node to the front\n latest_first.next = first_node\n first_node.prev = latest_first #rearranges the list", "def addAtHead(self, val):\n tmp = Node(val)\n tmp.nxt = self.head\n self.head = tmp\n if not self.tail:\n self.tail = self.head", "def push(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def push(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def push(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def add_node(self, val):\n if val not in self:\n self.setdefault(val, {})", "def insert(self, val):\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node", "def addAtTail(self, val: int) -> None:\n '''node = self.head\n\n if node:\n while True:\n if node.next:\n node = node.next\n else:\n node.next = MyListNode(val,next_node=None,prev_node=node)\n break\n else:\n self.head = MyListNode(val)'''\n\n if self.node_count > 0:\n node = self.get_node(self.node_count - 1)\n #print(\"Last \")\n temp_node = MyListNode(val, next_node=None, prev_node=node)\n node.next = temp_node\n self.node_count += 1\n else:\n self.head = MyListNode(val)\n self.node_count += 1", "def appendleft(self, val):\n self.new_dll.push(val)", "def append(self, value):\n if not self.head:\n self.head = Node(value)\n return\n link = self.head\n while link.next_value:\n link = link.next_value\n link.next_value = Node(value)\n return", "def push(self, value):\r\n new_node = Node(value)\r\n print('pushing value to the Tail : ' + str(value))\r\n if self.head:\r\n node = self.head\r\n while node.get_next():\r\n node = node.get_next()\r\n node.set_next(new_node)\r\n else:\r\n self.head = new_node", "def append(self, value):\n old_tail = self.tail\n self.tail = Node(value, None, old_tail)\n if self.count > 0: # if any Nodes: set tail previous to current Node\n old_tail.previous = self.tail\n else: # adding to an empty, than define front\n self.head = self.tail\n self.count += 1", "def push(self, key, val):\n self._data.append(Node(key, val))\n self.percolate_up(len(self._data) - 1)", "def appendleft(self, val):\n self._values.append(val)", "def push(self, key, val):\n # create new node and add to data\n new_ele = Node(key, val)\n self._data.append(new_ele)\n # percolate number into correct place\n self.percolate_up(len(self)-1)", "def push(self, val):\n self.insert(val)", "def addAtTail(self, val: int) -> None:\n 
tmp = ListNode(val)\n \n tmp1 = self.dummy\n while tmp1 and tmp1.next:\n tmp1 = tmp1.next\n tmp1.next = tmp\n tmp.pre = tmp1\n self.cnt += 1", "def insert(self, val):\n inserted_node = DblNode(val, self.head)\n if not self.head:\n self.head = inserted_node\n self.tail = self.head\n self.head.previous_node = inserted_node\n self.head = inserted_node", "def append(self, value):\n new_node = Node(value)\n if self.head:\n node = self.head\n while node.next != self.head:\n node = node.next\n node.next = new_node\n else:\n self.head = new_node\n new_node.next = self.head", "def add(self, val):\n if val <= self.value:\n if self.left: \n ''' if left child created just add the value'''\n self.left.add(val)\n else:\n self.left = BinaryNode(val)\n else:\n if self.right:\n self.right.add(val)\n else:\n self.right = BinaryNode(val)", "def addAtHead(self, val: int) -> None:\n pred, succ = self.head, self.head.next\n cur = Node(val)\n cur.next = succ\n cur.prev = pred\n pred.next = cur\n succ.prev = cur\n self.size += 1\n # print(\"addHead\", self.head.next.val)", "def addAtHead(self, val):\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node\n self.length += 1", "def push(self, val):\n self.high_low.append(val)\n try:\n self.compare_parent(self.high_low.index(self.high_low[-1]))\n except (ValueError, IndexError):\n pass", "def append(self, value):\r\n\r\n if self.head is None:\r\n self.head = Node(value)\r\n return\r\n node = self.head\r\n while node.next:\r\n node = node.next\r\n node.next = Node(value)", "def append(self, value):\n node = SLLNode(value)\n if self.head is None: \n self.head = node \n else: \n tail_node = self.head\n while tail_node.next_node is not None: \n tail_node = tail_node.next_node\n tail_node.next_node = node", "def append(self, value):\n if self.head is None:\n self.head = Node(value)\n return\n node = self.head\n while node.next:\n node = node.next\n node.next = Node(value)", "def add(self, val):\n if val < self.val:\n if self.left:\n self.left.add(val)\n else:\n self.left = Tree1(val)\n elif val > self.val:\n if self.right:\n self.right.add(val)\n else:\n self.right = Tree1(val)", "def push(self, value): ################# <-\n self.lst = self.lst +[value]", "def append(self, value):\n current = self.head\n\n while current:\n if current.next == None:\n current.next = Node(value)\n break\n current = current.next", "def append(self, value):\n node = DLLNode(value)\n if self.head is None: \n self.head = node \n self.tail = node \n else: \n self.tail.next_node = node \n node.prev_node = self.tail \n self.tail = node", "def append(self, val):\n self.val.append(val)", "def push(self, value):\r\n new_node = Node(value)\r\n print('pushing value : ' + str(value))\r\n # print (self.head, new_node.get_value())\r\n if self.head:\r\n # tmp_node = self.head\r\n new_node.set_next(self.head) # set new node's next = head node's next\r\n self.head = new_node # assign the new node as the Head node\r\n else:\r\n self.head = new_node", "def addAtHead(self, val: int) -> None:\n if(self.head == None):\n self.head = Node(val)\n else:\n new_node = Node(val)\n new_node.next = self.head \n self.head = new_node", "def enqueue(self, val):\n\n if self.front is None:\n self.back = self.front = Node(val)\n self._size += 1\n else:\n self.back._next = self.back = Node(val)\n self._size += 1\n return self.back", "def addAtHead(self, val):\n new_head = Node(val)\n if self._size == 0:\n self._head = new_head\n self._tail = self._head\n else:\n new_head.next = self._head\n self._head = 
new_head\n self._size += 1", "def addAtHead(self, val):\n cur = linkNode(val)\n cur.next = self.head.next\n cur.prev = self.head\n\n self.head.next = cur\n if cur.next:\n cur.next.prev = cur\n\n if cur.next == None: # first node\n self.tail = cur\n # self.printList()", "def addAtHead(self, val):\n new_node = ListNode(val)\n new_node.next = self.head\n self.head = new_node\n self.length += 1", "def addAtHead(self, val):\n node = Node(val)\n node.next = self.head\n self.head = node\n\n self.size += 1", "def addAtHead(self, val):\n node = Node(val)\n node.next = self.head\n self.head = node\n\n self.size += 1", "def push(self, value):\n if self.head == None:\n self.head = Node(value)\n self.tail = self.head\n self.current = self.head\n else:\n newNode = Node(value)\n newNode.prev = self.tail\n self.tail.next = newNode\n self.tail = self.tail.next\n self.length = self.length + 1", "def addAtHead(self, val: int) -> None:\n new_node = Node(val)\n new_node.next = self.head\n self.head = new_node", "def push(self, value):\n new_node = Node(value)\n new_node.next = self.head\n self.head = new_node\n self.count += 1\n return new_node", "def addAtHead(self, val):\n node = ListNode(val)\n if self.head == None:\n self.head = node\n else:\n node.next = self.head\n self.head = node", "def add_last(self, value):\n # Checking for empty list\n if self.head is None:\n self.add_first(value)\n return\n\n # Add new node\n self._add_last(self.head, value)", "def addAtHead(self, val: int) -> None:\n if self.head:\n temp_node = MyListNode(val, next_node=self.head, prev_node=None)\n self.head.prev = temp_node\n self.head = temp_node\n self.node_count += 1\n else:\n self.head = MyListNode(val)\n self.node_count += 1", "def addAtHead(self, val):\n node = ListNode(val)\n node.next = self.head.next\n self.head.next = node\n if self.head is self.tail:\n self.tail = node\n self.len += 1", "def addAtTail(self, val):\n self.nums.append(val)", "def addAtIndex(self, index, val):\n if index > 0 and not self.head:\n return\n \n tmp = Node(val)\n if index == 0 and not self.head:\n self.head = tmp\n self.tail = self.head\n return\n if index == 0 and self.head:\n tmp.nxt = self.head\n self.head = tmp \n return\n \n \n cur = self.head\n i = 1\n while i < index and cur:\n cur = cur.nxt\n i+=1\n if i == index:\n if not cur:\n if self.tail:\n self.tail.nxt = tmp\n self.tail = tmp\n else:\n self.head = tmp\n self.tail = tmp\n# print(\"KMG 1\")\n else:\n# print(\"inserting after the value %d\" %cur.val)\n tmp.nxt = cur.nxt\n cur.nxt = tmp\n if self.tail == cur:\n self.tail = tmp", "def append(self, val: T) -> None:\n self.concat(Linked(val))", "def addAtIndex(self, index: int, val: int) -> None:\n if(index == 0):\n new_node = Node(val)\n new_node.next = self.head \n self.head = new_node \n else:\n cnt = 0\n cur = self.head\n prev = None \n\n while cur != None:\n if(cnt == index):\n new_node = Node(val)\n new_node.next = cur \n prev.next = new_node\n return \n else:\n prev = cur \n cur = cur.next\n cnt += 1\n \n if(cnt == index):\n cur = Node(val)\n prev.next = cur", "def set_child(self, val, end=False):\n self._children[val] = TrieNode(val, end)", "def insertVal(self, val):\n pybtlib.insertVal.restype = None\n pybtlib.insertVal.argtypes = [ctypes.POINTER(Tree), ctypes.c_int]\n try:\n for i in val:\n pybtlib.insertVal(ctypes.byref(self), i)\n except:\n pybtlib.insertVal(ctypes.byref(self), val)\n return", "def add(self, value):\n self.children.append(Node(value))", "def addAtTail(self, val: int) -> None:\n self.addAtIndex(self.size, 
val)", "def add_to_tail(self, value):\n\n new_node = ListNode(value)\n\n if self.size == 0: # if list is empty\n self.head = self.tail = new_node # make new_node both head and tail\n\n else:\n self.tail.next = new_node # place new_node after tail\n new_node.prev = self.tail # place current tail before new_node\n self.tail = new_node # replace self.tail\n\n self.size += 1 # increase size of list", "def push_back(self, item):\n new_node = Node(item) # first create a node\n # if the list is empty, make it head\n if self.head is None:\n self.head = new_node\n else:\n # else, travel till the end \n temp_node = self.head\n while temp_node.next is not None:\n temp_node = temp_node.next\n # make last node to point to new_node\n temp_node.next = new_node\n self.n += 1 # increment no. of items", "def _add_last(cls, node, value):\n # Check if element is the last element\n if node.next_ is None:\n node.next_ = Node(value)\n return\n\n # Recursively go to next node\n cls._add_last(node.next_, value)", "def addAtIndex(self, index, val):\n if index < 0:\n new_node = ListNode(val)\n new_node.next = self.head\n self.head = new_node\n\n if index <= self.length:\n prev = None\n curr = self.head\n for i in range(1, index + 1):\n prev = curr\n curr = curr.next\n new_node = ListNode(val)\n if prev:\n prev.next = new_node\n else:\n self.head = new_node\n new_node.next = curr\n self.length += 1", "def add(self, value):\n # Find the tail\n tail = self.head\n while tail and tail.next:\n tail = tail.next\n\n if tail:\n # Add a new node with the value\n tail.next = Node(value, tail, None)\n else:\n # Add first node to the list\n self.head = Node(value, None, None)", "def append(self, val):\n self._values.push(val)", "def insert(self, val):\n if val not in self.dict_val:\n self.dict_val[val] = len(self.list_val)\n self.list_val.append(val)\n return True\n return False", "def append(self, value):\n node = Node(value)\n if self._head is None:\n self._head = node\n else:\n current = self._head\n while current.next:\n current = current.next\n current.next = node\n self._size += 1", "def push(self, val):\r\n return self.deque.append(val)", "def push_front(self, value):\n node = DLLNode(value)\n if self.head is None:\n self.tail = node \n else: \n self.head.prev_node = node \n node.next_node = self.head\n self.head = node", "def addAtIndex(self, index, val):\n if index < 0:\n return -1\n\n p = self.head\n while index and p: # 0-index before index-th\n p = p.next\n index -= 1\n\n if p == None:\n return\n cur = linkNode(val)\n cur.next = p.next\n cur.prev = p\n if p.next:\n p.next.prev = cur\n p.next = cur\n if cur.next == None: # tail\n self.tail = cur\n # self.printList()", "def push_front(self, value):\n new_node = self.Node(value)\n\n # Edge Case : List is empty\n if self._size == 0:\n self._tail = new_node\n self._head = new_node\n self._size += 1\n return\n\n new_node.next = self._head\n self._head.prev = new_node\n self._head = new_node\n self._size += 1", "def addAtIndex(self, index, val):\n if index > self.len:\n return\n p = self.head\n while index > 0:\n index -= 1\n p = p.next\n\n node = ListNode(val)\n node.next = p.next\n p.next = node\n\n if p is self.tail:\n self.tail = node\n self.len += 1", "def addAtIndex(self, index: int, val: int) -> None:\n new_node = Node(val)\n curr = self.head\n if index < 1:\n self.head = new_node\n return\n count = 0\n prev = self.head\n while curr:\n count += 1\n if count == index:\n new_node.next = prev.next\n prev.next = new_node\n break\n prev = curr.next\n curr = curr.next\n \n 
if count == index:\n curr.next = new_node\n else:\n return -1", "def insert(self, value):\n old_head = self.head\n self.head = Node(value, old_head)\n if self.count > 0: # if any Nodes: set tail previous to current Node\n old_head.next = self.head\n else: # adding to an empty, than define front\n self.tail = self.head\n self.count += 1", "def push(self, value: int):\n new_node = Node(value)\n new_node._next = self._head\n self._head = new_node\n self._len += 1\n print(f\"value: {value}, head: {self._head}, len: {self._len}, new_node: {new_node}\")", "def add_sorted(self, val):\n if self.root is None:\n self.root = TreeNode(val)\n else:\n self._add_sorted(val, self.root)", "def addAtIndex(self, index: int, val: int) -> None:\n if 0 < index < self.node_count:\n prev_neighbor = self.get_node(index - 1)\n next_neighbor = prev_neighbor.next\n temp_node = MyListNode(val)\n prev_neighbor.next = temp_node\n temp_node.next = next_neighbor\n temp_node.prev = prev_neighbor\n next_neighbor.prev = temp_node\n self.node_count += 1\n elif index == 0:\n self.addAtHead(val)\n #self.node_count += 1\n elif index == self.node_count:\n self.addAtTail(val)\n #self.node_count += 1", "def append(self, value: Any) -> None:\n new_node = Node(value)\n try: # Assume queue is not empty\n new_node.next = self._tail\n self._tail.prev = new_node\n except AttributeError: # Queue is empty\n self._head = new_node\n self._tail = new_node" ]
[ "0.8086445", "0.7602821", "0.75396514", "0.74361414", "0.7427383", "0.7410255", "0.74002004", "0.7388654", "0.73812497", "0.7362498", "0.7361017", "0.73132956", "0.73118097", "0.7306703", "0.72853166", "0.7268352", "0.72647965", "0.7225639", "0.7210625", "0.7206143", "0.71408105", "0.71408105", "0.7132019", "0.7109238", "0.70833707", "0.70181113", "0.6979405", "0.69201744", "0.6904901", "0.68448794", "0.68448794", "0.68448794", "0.68402946", "0.683134", "0.6812287", "0.6811985", "0.68109685", "0.6800884", "0.6800647", "0.67923963", "0.6787927", "0.6775549", "0.6762928", "0.676261", "0.67270815", "0.67183006", "0.6712953", "0.6669138", "0.6665035", "0.66612434", "0.6650752", "0.6650341", "0.66451997", "0.6636435", "0.66330093", "0.66311127", "0.6625865", "0.66008914", "0.6596934", "0.6579538", "0.6568982", "0.6552391", "0.6549541", "0.6548443", "0.6548396", "0.6548396", "0.6545535", "0.6541966", "0.6531567", "0.6495692", "0.64948285", "0.647106", "0.64669377", "0.64613926", "0.64458156", "0.64423496", "0.63723814", "0.63505405", "0.6341789", "0.63387096", "0.63368773", "0.6328771", "0.6327775", "0.6304741", "0.6303342", "0.6301218", "0.62961084", "0.6292177", "0.6287331", "0.62860596", "0.62518126", "0.6243354", "0.62283164", "0.6227066", "0.62247944", "0.6217198", "0.62066644", "0.61921495", "0.6191889", "0.6188388" ]
0.80715704
1
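For orientation, here is a minimal sketch of the push_back document above in use. The Node constructor and the head/tail/size/is_empty scaffolding are assumptions made for illustration; only the push_back body itself comes from the record:

class Node:
    def __init__(self, value, next_node=None):
        self.value = value
        self.next_node = next_node

class SinglyLinkedList:
    def __init__(self):
        self.head = None
        self.tail = None
        self.size = 0

    def is_empty(self):
        return self.size == 0

    # push_back body as in the record above
    def push_back(self, val):
        new_node = Node(val)
        if self.is_empty():
            self.head = new_node
        else:
            self.tail.next_node = new_node
        self.tail = new_node
        self.size += 1

lst = SinglyLinkedList()
for v in (1, 2, 3):
    lst.push_back(v)
assert lst.head.value == 1 and lst.tail.value == 3 and lst.size == 3

The empty-list branch is what keeps head valid on the first insertion; the unconditional tail assignment at the end covers both branches.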
Removes a node from the front of the list
def pop_front(self):
    if self.is_empty():
        return None

    val = self.head.value

    # Update head and size
    self.head = self.head.next_node
    self.size -= 1

    # If the only node was removed, also need to update tail
    if self.is_empty():
        self.tail = None

    return val
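As a quick sanity check on the edge cases (again with assumed scaffolding; only pop_front's body comes from the record), note that removing the last node must clear tail as well as head:

class Node:
    def __init__(self, value, next_node=None):
        self.value = value
        self.next_node = next_node

class SinglyLinkedList:
    def __init__(self):
        self.head = None
        self.tail = None
        self.size = 0

    def is_empty(self):
        return self.size == 0

    # pop_front body as in the record above
    def pop_front(self):
        if self.is_empty():
            return None
        val = self.head.value
        self.head = self.head.next_node
        self.size -= 1
        if self.is_empty():
            self.tail = None
        return val

lst = SinglyLinkedList()
lst.head = lst.tail = Node("only")
lst.size = 1
assert lst.pop_front() == "only"
assert lst.head is None and lst.tail is None  # tail cleared with the last node
assert lst.pop_front() is None                # popping an empty list is safe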
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_node_at_start(self):\n if not self.head:\n print('List already empty.')\n return\n self.head = self.head.next", "def pop_front(self):\n if self.head is None:\n raise IndexError('pop_front from empty list')\n node = self.head \n if node.next_node is None:\n self.tail = None \n else: \n node.next_node.prev_node = None \n self.head = node.next_node\n return node.value", "def remove_first(self):\n # return None if there are no Nodes\n if self.head is None:\n return None\n # save and disconect the first Node from the list\n # and set the head to the next Node\n removed = self.head\n self.head = self.head.next\n removed.next = None\n # set the tail as None if list got empty\n if self.head is None:\n self.tail = None\n # remove the skip back pointer from the second Node if needed\n elif self.head.next is not None:\n self.head.next.skip_back = None\n \n return removed.data", "def remove_front(self):\n\n if self.front.next is None:\n temp = self.front\n self.front = None\n return temp.data\n\n temp = self.front\n self.front = self.front.next\n return temp.data", "def remove_from_head(self):\n\n if self.size == 0: # no elements in list\n return None # nothing to return\n\n removed_value = self.head.value # make a copy of the node to be deleted\n\n if self.size == 1: # if only one element in list (node is head and tail)\n self.head = self.tail = None # list will be empty\n\n else: # more than one element in list\n self.head = self.head.next # shift head right (reassign head to head.next)\n self.head.prev = None # reassign head.prev to point at None (it used to point at old_head)\n\n self.size -= 1\n return removed_value", "def __remove_first(self):\n if self.__head is not None:\n self.__length -= 1\n self.__head = self.__head.next()\n if self.__length == 0: # when there are no more elements in the list,\n self.__last = None # remove the pointer to the last element", "def pop(self) -> None:\n node = self.head\n self.head = self.head.next\n node.next = None", "def delete_node_at_beginning(self):\n\t\tif self.root is None:\n\t\t\traise EmptyRootException(\"ERROR: No node available in list. 
Please insert node in list.\")\n\t\tcurrent_node = self.root\n\t\tself.root = current_node.next\n\t\tself.root.prev = None\n\t\tself.display_nodes()", "def pop_front(self):\n if self.n==0:\n print(\"Error; empty list\")\n return\n else:\n temp = self.head # retrieve front node\n self.head = temp.next # assign head to the second node\n self.n -= 1\n return temp.val", "def remove_first(self):\n if self.is_empty(): raise RuntimeError(\"Empty list\")\n\n data = self.head.data\n self.head = self.head.nxt\n self.size -= 1\n\n if self.is_empty(): self.tail = None\n else: self.head.prev = None\n\n return data", "def remove(self, item):\n \"\"\"\n :type item: Node()\n :rtype None\n \"\"\"\n if self.head.getData() == item:\n self.head = self.head.getNext()\n return\n\n prev = curr = self.head\n while curr: \n if curr.getData() == item:\n prev.setNext(curr.getNext())\n break\n prev = curr\n curr = curr.getNext()", "def pop_front(self):\n if self.empty():\n return \"Linked List is Empty\"\n\n h = self.head\n if h.next is None:\n self.head = None\n return h.data\n\n self.head = h.next\n return h.data", "def remove(self, value):\n node = self.first()\n # case 1 : in case of empty list, do nothing and return None\n if node is None:\n return None\n # case 2 : list has at least one element and node to be removed is the first element\n if node.value() == value:\n self.__head = node.next()\n self.__length -= 1\n node.set_next(None)\n return node\n # case 3 : list has at least one element and node to be removed is not the first element\n previous = node\n node = node.next()\n while node is not None:\n if node.value() == value:\n previous.set_next(node.next())\n self.__length -= 1\n node.set_next(None)\n return node\n else:\n node = node.next()\n return None\n\n ##############", "def remove_first(self):\n if self.is_empty():\n raise self.NoSuchNodeException()\n\n tmp_val = self.head.data\n self.head = self.head.next_node\n self.list_size -= 1\n return tmp_val", "def remove_front(self):\n\n if self.items:\n return self.items.pop(0)\n return None", "def _remove_node(self, node):\n prev = node.prev\n new = node.next\n\n prev.next = new\n new.prev = prev", "def remove_index(self, index):\n current = self.head\n position = index\n if index > (self.size() - 1):\n return None\n elif index == 0:\n self.head = current.next_node\n else: \n while position >= 1:\n previous = current\n current = current.next_node\n position -= 1 \n previous.next_node = current.next_node\n\n return current", "def move_to_head(self, node):\n if node is self.head:\n return\n value = node.value\n self.delete(node)\n self.add_to_head(value)", "def pop_head(self):\n if self.head==None:\n return None\n if self.head.getLink()==None:\n temp=self.head\n self.head=None\n self.tail=None\n return temp\n temp = self.head\n self.head=self.head.getLink()\n return temp", "def pop(self):\n prev_node, curr_node = self.head, self.head # use prev = self.head for single-item list\n \n while curr_node.next is not None:\n prev_node = curr_node\n curr_node = curr_node.next\n \n prev_node.next = None\n return None", "def delete_at_beginning(self) -> None:\n current = self.head\n if current is None:\n return None\n else:\n self.head = self.head.get_next_node()\n self.head.set_previous_node(None)\n temp = current.get_data()\n del current\n self._decrease_length()\n return temp", "def remove(self, value):\r\n if self.head is None:\r\n return\r\n\r\n if self.head.value == value:\r\n self.head = self.head.next\r\n return\r\n\r\n node = self.head\r\n while node.next:\r\n 
if node.next.value == value:\r\n node.next = node.next.next\r\n return\r\n node = node.next", "def delete_first(self):\n if self.is_empty():\n raise Empty('list is empty')\n return self._delete_node(self._head._next)", "def pop_front(self):\n if (self._size == 0):\n return None\n\n output_value = self._head.value\n\n self._head = self._head.next\n self._head.prev = None\n self._size -= 1\n\n # Edge case, list is now empty\n if (self._size == 0):\n self._tail = None\n\n return output_value", "def remove(self , element):\n current = self.head \n previous = None\n\n while current and current.data != element:\n previous = current\n current = current.next\n\n if previous == None :\n self.head = current.next\n elif current :\n previous.next = current.next\n current.next = None", "def remove_node(self, data):\n if not self.head:\n raise Exception(\"List is empty\")\n if self.head.data == data:\n self.head = self.head.next\n return\n previous_node = self.head\n for node in self:\n if node.data == data:\n previous_node.next = node.next\n return\n previous_node = node\n raise Exception(\"Node with data '{}' not found\".format(data))", "def remove(self, data):\n\n traverse = self.head\n temp = self.head\n if self.head == None:\n return None\n\n if traverse.data == data:\n self.head = traverse.next\n return\n\n while traverse.next != None:\n\n temp = traverse.next\n if temp.data == data:\n traverse.next = temp.next\n return\n\n traverse = traverse.next", "def move_to_front(self, node):\n\n if self.size == 0: # no items in list\n return # nothing to move; return out\n\n if self.head is node: # if node is head already\n return # nothing to move, node is at beginning; return out\n\n if self.tail is node: # if node is tail\n self.tail = node.prev # shift tail left\n\n else: # else node must not be tail\n # if node is not tail, then node.next is not None\n node.next.prev = node.prev # sew node.next to node.prev\n\n node.prev.next = node.next # if node=tail next is None; else, next is a node. 
Works either way!\n self.head.prev = node # assign current head's prev to point at node instead of None\n node.next = self.head # place node before head\n self.head = node # reassign head (shifting left) head is now node\n self.head.prev = None # reassign head.prev to point at None (no nodes before head)", "def _remove_node(self, node):\n previous = node.prev\n next_node = node.next\n\n previous.next = next_node\n next_node.prev = previous", "def remove(self, data):\n\n traverse = self.head\n temp = self.head\n if traverse.data == data:\n self.head = traverse.next\n return\n\n while traverse.next != None:\n\n temp = traverse.next\n if temp.data == data:\n traverse.next = temp.next\n return\n\n traverse = traverse.next", "def remove_elem(self, node):\n if node.prev:\n node.prev.next = node.next\n if node.next:\n node.next.prev = node.prev\n if node is self.head:\n self.head = node.next\n node.prev = None\n node.next = None", "def remove_value(self, value):\n if self.head is None: \n raise ValueError('Deleting from empty list.')\n node = self.head \n if node.value == value: \n self.head = self.head.next_node \n return node \n while node.next_node is not None:\n current = node.next_node \n if current.value == value:\n node.next_node = current.next_node \n return current \n node = current\n raise ValueError('Deleting non-existing value.')", "def remove(self, element):\n if self.head.element == element:\n self.head = self.head.next\n self.head.prev = None\n return None\n cursor = self.head\n while cursor.next is not None:\n if cursor.next.element == element:\n cursor.next = cursor.next.next\n if cursor.next is not None:\n cursor.next.prev = cursor\n break\n else:\n cursor = cursor.next", "def remove_node(self, node):\n # remove the first Node \n if node == self.head:\n return self.remove_first()\n # remove the last Node\n elif node == self.tail:\n \n return self.remove_last()\n # set the skip back pointers after removing the Node and set the\n # preview Node to point on the next one(skip the removing node)\n if node.next != self.tail:\n if node.skip_back is not None:\n node.next.next.skip_back = node.skip_back.next\n else:\n node.next.next.skip_back = self.head\n if node.skip_back is not None:\n node.next.skip_back = node.skip_back\n node.skip_back.next.next = node.next\n else:\n self.head.next = node.next\n node.next.skip_back = None\n # disconnect the Node from the list\n node.next = None\n return node.data", "def remove(self):\r\n if self.first() is not None:\r\n self.dec_size()\r\n self.set_first(self.first().next())\r\n if self.size() == 0: # when there are no more elements in the list,\r\n self.__last = None # remove the pointer to the last element\r", "def pop_head(self):\n if self.is_empty():\n return None\n\n current = self._head._next\n node = self._head\n current._previ = None\n self._head = current\n data = node._data\n nodo = Node(None)\n\n self._size -= 1\n\n return data", "def unshift(self):\n if self.head:\n node = self.head\n\n if self.head == self.tail:\n self.head = None\n self.tail = None\n else:\n self.head = node.next\n self.head.prev = None\n\n return node.value\n\n else:\n return None", "def remove_node(self, value):\n node = self.head\n\n while node:\n if self.head.value == value:\n self.head = self.head.next\n return\n if node.next.value == value:\n node.next = node.next.next\n return\n node = node.next", "def removeNode(self, node):", "def delete_first(self):\n if self.is_empty():\n raise Empty('list is empty')\n answer = self._head._element\n self._head = 
self._head._next\n self._size -= 1\n if self.is_empty(): # special case as deque is empty\n self._tail = None # removed head had been the tail\n else:\n self._head._prev = None\n return answer", "def removeFront(self):\n if self._size == 0:\n raise AttributeError(\"Cannot removeFront from an empty Deque\")\n \n temp = self._front\n self._front = self._front.getPrevious()\n if self._size == 1:\n # removing only item which is the rear as well as the front item\n self._rear = None\n else:\n self._front.setNext(None)\n self._size -= 1\n \n return temp.getData()", "def remove(self, node):\n current = self.head\n target_node = node\n if target_node == current:\n self.pop()\n elif target_node == self.tail:\n self.shift()\n else:\n while current.next_node:\n try:\n if current.next_node == target_node:\n next_node = current.next_node\n # current.next_node = target_node.next_node\n next_node = target_node.next_node\n # target_node = current.next_node\n next_node.previous_node = current.previous_node\n break\n current = current.next_node\n except AttributeError:\n pass\n else:\n raise AttributeError", "def pop(self):\n res = self.first_node\n self.first_node = self.first_node.next\n return res", "def remove(self, node):\n curr, prev = self.find(node, inc_prev=True)\n if curr:\n self._remove(curr, prev)", "def remove_by_value(self, data):\n pre_node = None\n for n in self:\n if n.data == data:\n if pre_node is None:\n self.pop()\n else:\n pre_node.next = n.next\n break\n pre_node = n\n else:\n raise ValueError(f'value [{data}] not found in linked list')", "def remove(self, node):\n if type(node) is Node:\n prev = None\n curr = self.head\n while curr:\n if curr is node:\n if prev:\n prev.next = curr.next\n else:\n self.head = curr.next\n self._length -= 1\n break\n prev = curr\n curr = curr.next\n else:\n raise ValueError(\"Cannot remove node not in list.\")\n else:\n raise ValueError(\"Argument to remove must be of node type.\")", "def remove_value(self, value):\n if self.head is None: \n raise ValueError('Deleting from empty list.')\n node = self.head \n if node.value == value: \n self.head = self.head.next_node \n if self.head is None: \n self.tail = None\n else:\n self.head.prev_node = None \n return node \n while node.next_node is not None:\n node = node.next_node \n if node.value == value:\n node.prev_node.next_node = node.next_node \n if node.next_node is None: \n self.tail = node.prev_node \n else:\n node.next_node.prev_node = node.prev_node\n return node\n raise ValueError('Deleting non-existing value.')", "def _move_to_head(self, node):\n self._remove_node(node)\n self._add_node(node)", "def detach_node(self, node):\n # if the node is at the end\n if self.end == node:\n self.pop()\n\n # elif it's at the beginning\n elif self.begin == node:\n # call unshift\n self.unshift()\n #else it's in the middle\n else:\n # skip over it\n # save node.prev, node.next\n prev = node.prev\n next = node.next\n # set prev.next to saved next\n node.prev.next = next\n # set next.prev to saved prev\n node.next.prev = prev", "def pop_back(self):\n if self.head is None:\n raise IndexError('pop_back to empty list')\n node = self.tail \n if node.prev_node is None:\n self.head = None\n else:\n node.prev_node.next_node = None\n self.tail = node.prev_node\n return node.value", "def delete_node_at_end(self):\n if not self.head:\n print('List already empty')\n return\n temp = self.head\n while temp.next:\n if not temp.next.next:\n break\n temp = temp.next\n temp.next = None", "def push_front(self, val: Generic[T]) -> 
None:\n first_node = self.node.next\n\n self.node.next = Node(val)\n latest_first = self.node.next\n\n latest_first.prev = self.node #pushes the node to the front\n latest_first.next = first_node\n first_node.prev = latest_first #rearranges the list", "def remove(self, item):\n \n previous = None\n current = self.head\n \n while current is not None:\n \n if current.get_data() == item:\n # If the item to be removed is the first item\n if previous is None:\n self.head = current.get_next()\n else:\n previous.set_next(current.get_next())\n return\n \n else:\n previous = current\n current = current.get_next()", "def pop(self):\n return super().remove_item_from_front()", "def remove(self, val):\n current_node = self.head\n previous_node = None\n\n while current_node:\n if current_node.val == val:\n if previous_node:\n previous_node.next = current_node.next\n else:\n self.head = current_node.next\n\n previous_node = current_node\n current_node = current_node.next", "def remove(self, d):\n\n if self.head is not None:\n if self.head.data == d:\n self.head = self.head.next\n else:\n temp = self.head\n while temp.next is not None:\n if temp.next.data == d:\n temp.next = temp.next.next\n break\n else:\n temp = temp.next", "def remove(self, key):\n if self.head is None:\n print('Cannot remove from empty list!')\n return\n if self.head.data == key:\n self.head = self.head.next\n return\n\n itr = self.head\n prev = ListNode()\n while itr:\n curr = itr\n if itr.data == key:\n prev.next = curr.next\n return\n prev = curr\n itr = itr.next", "def _remove(self, curr, prev):\n if prev:\n # If there is a previous node then update it's next attribute\n # to refer to the next node of the node that is being removed.\n prev.next = curr.next\n else:\n # If there is no previous node then we are at the head of the list.\n # Update the first_node reference to the next node of the node \n # that is being removed.\n self.first_node = curr.next\n # Delete the node that has been delinked.\n del curr", "def remove_a_specific_item(self, index):\n\n current = self.head\n previous = None\n for i in range(index):\n previous = current\n current = current.next\n if previous is None: self.head = current.next\n else: previous.next = current.next\n self.size -= 1", "def remove(self, item):\n \n if self.head is None:\n raise EmptyListError\n \n if self.head.data == item:\n self.remove_head()\n return print(f'{item} removed')\n \n prev_node, curr_node = None, self.head \n \n while curr_node is not None:\n if curr_node.data == item:\n prev_node.next = curr_node.next\n return print(f'{item} removed')\n prev_node = curr_node\n curr_node = curr_node.next\n \n print(\"item not found\")", "def remove(self, key):\n if self.head.data == key: # checking first corner case of first node to be removed\n self.head = self.head.next\n return\n\n elif self.head is None: # checking second corner case of linked list being empty\n return\n\n else: # otherwise maintain two pointers and remove the required node\n curr_node = self.head.next\n prev_node = self.head\n while prev_node.next is not None:\n if curr_node.data == key:\n prev_node.next = curr_node.next\n return\n\n return", "def delete_at_index(self, index: int) -> T:\n try:\n previous_node = self.__get_node_at_index(index-1)\n except ValueError as e:\n if self.is_empty(): \n raise ValueError(\"List is empty\")\n elif index == 0:\n item = self.head.items\n self.head = self.head.link\n else:\n raise e\n else:\n item = previous_node.link.items\n previous_node.link = previous_node.link.link\n 
self.length -= 1\n return item", "def popFirst(self):\n\n if self.firstItem == None:\n raise Exception(\"cannot popFirst - linked list is empty\")\n\n oldFirstItem = self.firstItem\n self.firstItem = oldFirstItem.next\n return oldFirstItem", "def remove(self, index=0):\n # Error case: Index out of acceptable range\n if index < 0 or index >= self._size:\n raise RangeError(\"index out of range.\")\n\n # Edge case: Remove from front of list\n # Behave list pop_front()\n if (index == 0):\n return self.pop_front()\n\n # Edge case: Remove from end of list\n # Behave list pop_back()\n if (index == self._size - 1):\n return self.pop_back()\n\n i = 1\n current_node = self._head.next\n\n while(i < index):\n current_node = current_node.next\n i += 1\n\n current_node.prev.next = current_node.next\n current_node.next.prev = current_node.prev\n self._size -= 1\n\n return current_node.value", "def _dequeue(self):\n node = self.head.next\n self._remove_node(node)\n return node", "def remove_first(self):\n if self.is_empty():\n raise IndexError\n else:\n self._first = self._rest._first\n if self._rest.is_empty():\n self._rest = None\n else:\n self._rest = self._rest._rest", "def remove(self,index=0):\n if index>self.size-1: raise IndexError(\"Index out of range.\")\n elif self.size==1: self.reference=None\n else:\n pointer = self.reference\n for i in range(index): pointer = pointer.next\n pointer.previous.next, pointer.next.previous = pointer.next, pointer.previous\n if index==0: self.reference=self.reference.next\n self.size-=1", "def front_node(self):\n return self.sentinel.next if self.N != 0 else None", "def delete(self):\n if self.head is None:\n return None\n item = self.head.data\n self.head = self.head.next\n return item", "def delete_list(self): \n temp_node = self.head\n while temp_node is not None:\n prev_node = temp_node\n temp_node = temp_node.next\n # prev_node.val += \": deleted\" # for sanity check\n # reset data\n prev_node.val = None\n prev_node.next = None", "def _delete_node(self, node):\n\n if self.is_empty():\n raise Empty(\"List is empty!\")\n\n predecessor = node._prev\n successor = node._next\n\n predecessor._next = successor\n successor._prev = predecessor\n\n elem = node._element\n node._prev = node._next = node._element = None\n\n self._size -= 1\n\n return elem", "def remove(self,p):\r\n \r\n if p == self.head: #if p is the head node\r\n self.head = p.next #set the next node of p to be the 'new' head node\r\n (p.next).prev = None #remove the node at p\r\n p.next = None\r\n \r\n elif p == self.tail: #if p is the tail node\r\n self.tail = p.prev #set the prev node of p to be the 'new' tail node\r\n (p.prev).next = None #remove the node at p\r\n p.prev = None\r\n \r\n else:\r\n (p.prev).next = p.next #linking out p\r\n (p.next).prev = p.prev\r\n p.prev = None #invalidating the position p\r\n p.next = None\r\n\r\n self.size -=1 #decrease length of linked list by 1\r", "def delete(self, index):\n # check validity of index:\n if index < 0 or index > self.n:\n print(\"Index Error; please input valid index\")\n return\n # if head element is to be removed,\n if index == 0:\n _ = self.pop_front()\n return\n # else,\n temp_node = self.head\n for _ in range(index-1):\n temp_node = temp_node.next # traverse the list\n index_node = temp_node.next\n # unlink\n temp_node.next = temp_node.next.next\n index_node = None\n self.n -= 1", "def delete_node_at_index(self, index):\n if index < 0 or index >= self.size:\n return\n\n curr = self.head\n if index == 0:\n self.head = curr.next\n else:\n for 
i in range(index - 1):\n curr = curr.next\n curr.next = curr.next.next\n\n self.size -= 1", "def remove(self, data):\n prev = None\n curr = self.head\n while curr != None:\n if curr.data == data:\n self.size -= 1\n if curr == self.head:\n self.head = curr.next\n else:\n prev.next = curr.next\n if curr == self.tail:\n self.tail = prev\n return curr\n else: \n prev = curr\n curr = curr.next\n return None", "def remove(self,value):\n if self.is_empty():\n return\n current = self._head\n if current.value == value:\n self._head = self._head.next\n elif current.next is None:\n # Contains one element only, but it is not the one we are looking for.\n return\n else:\n while current.next.value != value:\n current = current.next\n if current.next is None: # Remove value not found.\n return\n\n # Find removed value, remove it.\n current.next = current.next.next\n if current.next is None:\n self._tail = current\n self._size -= 1", "def delete_first(self):\n if self._size == 0:\n raise Empty('Dequeue is empty')\n return self._delete_node(self._head._next)", "def delete(self, index):\n if index == 0 and self.head is not None:\n self.head = self.head.next\n return\n\n current_index = 0\n current = self.head\n previous = None\n\n while current:\n if current_index == index:\n previous.next = current.next\n\n previous = current\n current = current.next\n current_index += 1", "def remove(self, item: Any) -> None:\n curr = self._first\n\n if not curr:\n raise ValueError\n\n elif curr.item == item:\n self._first = self._first.next\n self._length -= 1\n\n else:\n while curr is not None:\n if curr.next and curr.next.item == item:\n curr.next = curr.next.next\n self._length -= 1\n return\n curr = curr.next\n raise ValueError", "def delete(self, value):\n current = self.head\n if current.value == value:\n self.head = current.next\n else:\n while current:\n if current.value == value:\n break\n prev = current\n current = current.next\n if current == None:\n return\n prev.next = current.next\n current = None", "def removeFirst(self):\n if self.__nelems == 0:\n raise BaseException('Empty List')\n\n temp = self.__head\n if self.__nelems == 1:\n self.__head = self.__tail = None\n\n else:\n self.__head = temp.getNext()\n temp.setNext(None)\n\n self.__nelems -= 1\n\n return temp.getData()", "def remove_second(list):\n if list is None: return\n first = list\n second = list.next\n # Make the first node refer to the third\n first.next = second.next\n # Separate the second node from the rest of the list\n second.next = None\n return second", "def delete_from_tail(self):\n\n current = self.head\n #get the node right before the tail\n while current != None:\n if current.next == self.tail:\n current.next = None\n return\n current = current.next", "def dequeue(self): ##################### <-\n \"\"\"Llst -> lst, dequeue, lst -> Llst\"\"\"\n top = self.top\n def linkedLstToList(Llst):\n \"\"\"A list to a single node linked list\"\"\"\n if not Llst: return []\n return [Llst.value] + linkedLstToList(Llst.next)\n def lstToLinkedList(lst):\n \"\"\"A list to a single node linked list\"\"\"\n if not lst: return\n LinkedList = Node(lst[0])\n LinkedList.next = lstToLinkedList(lst[1:])\n return LinkedList\n self.top = lstToLinkedList(linkedLstToList(top)[:-1])\n return linkedLstToList(top)[-1]", "def delete(self, value):\n current = self.head\n prev = None\n\n while current:\n if current.value == value:\n if prev == None:\n self.head = current.next\n else:\n prev.next = current.next\n break\n prev = current\n current = current.next", "def 
delete_first(self):\n if self.is_empty():\n raise Empty(\"List is empty\")\n return self._delete_node(self._header._next)", "def pop(self):\n popped_node = self.head\n # popped = self.head.data\n # new_head = self.head.next_node\n # self.head = new_head\n self.head = popped_node.next_node\n self.head.previous_node = None\n return popped_node.data", "def remove(self, obj):\n\n if self.begin.value == obj:\n self.begin = self.begin.next\n self.begin.prev = None\n\n elif self.end.value == obj:\n self.end = self.end.prev\n self.end.next = None\n\n else:\n node = self.begin\n while node.value != obj:\n node = node.next\n node.prev.next = node.next\n node.next.prev = node.prev", "def delete_node(self, node):\n curr = self.head\n while curr.next is not None:\n if curr.next == node:\n break\n curr = curr.next\n curr.next = node.next\n node = None\n return", "def push_front(self, item):\n new_node = Node(item)\n # if the list is empty, make it head\n if self.head is None:\n self.head = new_node\n # else, \n else:\n new_node.next = self.head # new node points to current head\n self.head = new_node # current head points to new_node\n self.n += 1", "def erase(self, index):\n node = self._get_node_at(index) \n if node is None:\n raise IndexError('List index out of range.') \n if node == self.head: \n if node.next_node is None:\n self.tail = None \n else: \n node.next_node.prev_node = None \n self.head = node.next_node\n elif node == self.tail: \n node.prev_node.next_node = None \n self.tail = node.prev_node\n else: \n node.prev_node.next_node = node.next_node\n node.next_node.prev_node = node.prev_node\n return node.value", "def delete(self, value):\n current = self.head\n previous = None\n while current.value != value and current.next:\n previous = current\n current = current.next\n if current.value == value:\n if previous:\n previous.next = current.next\n else:\n self.head = current.next\n pass", "def pop(self, index):\n if index < 0 or index >= self.length:\n raise \"Index out of range\"\n node = self.head\n i=0\n while i<=index:\n if i==index:\n if self.current == node:\n self.current = node.next if node.next else node.prev\n if node.prev:\n node.prev.next = node.next\n node.next.prev = node.prev\n else:\n self.head = node.next\n break\n node = node.next\n i = i+1\n self.length = self.length - 1", "def removeNode(node, head, tail) :\n if node is head :\n node = None\n return (None, None)\n if node is tail :\n tail = tail.prev\n tail.next = None\n node.prev = None\n return (head, tail)\n node.prev.next = node.next\n node.next.prev = node.prev\n node.next = None\n node.prev = None\n node = None\n return (head, tail)", "def remove_min(self) -> Optional[T]:\n if self._array == []:\n return None\n else:\n # Remove top node\n value = self._array[0]\n self._array = self._array[1:]\n # If nodes remaing in the min heap...\n if self._array:\n # Move end node to the top\n end_node = self._array.pop()\n self._array = [end_node] + self._array\n # Rebuild the heap (heapify)\n self.__build()\n # Return the top node\n return value", "def pop_first(self):\n self.pop_item(0)", "def remove(self, item):\n \n previous = None\n current = self.head\n \n while current is not None:\n \n if current.get_data() == item:\n # If the item to be removed is the first item\n if previous is None:\n self.head = current.get_next()\n else:\n previous.set_next(current.get_next())\n return\n \n # Early stop\n elif current.get_data() > item:\n return\n \n else:\n previous = current\n current = current.get_next()", "def deleteHead(self):\n 
if not self._head:\n return\n\n if self._head is self._tail:\n self._head = None\n self._tail = None\n else:\n self._head = self._head.next\n self._size -= 1", "def push_front(self, data):\n n = Node(data)\n if self.empty():\n self.head = n\n return\n\n l = self.head\n self.head = n\n n.next = l\n return", "def remove(self, key):\n index = self._hash_mod(key)\n node = self.storage[index]\n node_before = None\n if node:\n while node:\n if node.key == key:\n if node_before:\n node_before.next = node.next\n elif node.next:\n self.storage[index] = node.next\n else:\n self.storage[index] = None\n self.key_count -= 1\n return\n node_before = node\n node = node.next\n print(f\"An element with key '{key}' cannot be found!\")" ]
[ "0.7608564", "0.7596137", "0.75486904", "0.73908806", "0.7308479", "0.7298663", "0.72594917", "0.72341925", "0.72223204", "0.72218394", "0.72170967", "0.7206209", "0.7150894", "0.71452713", "0.7012732", "0.6987773", "0.69702953", "0.6955942", "0.6948797", "0.69455516", "0.69202185", "0.6906258", "0.69018716", "0.6890063", "0.6882392", "0.6875783", "0.68647575", "0.6859727", "0.6848555", "0.6816136", "0.681087", "0.67938125", "0.67787886", "0.6772353", "0.6770696", "0.67578036", "0.6734231", "0.67303306", "0.6729934", "0.67268085", "0.6705371", "0.6685397", "0.66713536", "0.6667064", "0.6665005", "0.66568565", "0.66435885", "0.66407335", "0.66169995", "0.66140866", "0.66067386", "0.6586732", "0.65804505", "0.6553455", "0.6548826", "0.65447474", "0.65354663", "0.652264", "0.6516658", "0.65165275", "0.6514014", "0.65138805", "0.65085125", "0.6505711", "0.6476444", "0.64618003", "0.6459764", "0.64581597", "0.6454981", "0.6438418", "0.6433264", "0.64332426", "0.64291775", "0.6421866", "0.6418631", "0.6416588", "0.64120924", "0.6408359", "0.6403999", "0.640159", "0.6395328", "0.6391805", "0.6369941", "0.6365311", "0.6354689", "0.63541514", "0.6348061", "0.6333411", "0.63329464", "0.6331945", "0.633173", "0.63239896", "0.6316086", "0.63096356", "0.63072914", "0.6303777", "0.6301701", "0.6295013", "0.62902296", "0.6288593" ]
0.70528644
14
Sorts the singly linked list using a placeholder list.
def insertion_sort(self):
    if self.is_empty():
        return
    #new_list = LinkedList()
    curr_ele = self.head
    curr_ele = curr_ele.next_node
    while (curr_ele is not None):
        new = self.head
        while new != curr_ele:
            if new.value > curr_ele.value:
                holder = curr_ele.value
                curr_ele.value = new.value
                new.value = holder
            else:
                new = new.next_node
        curr_ele = curr_ele.next_node
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sort_1(l):\n pass", "def reorderList(self, head: ListNode) -> None:\n node_list = []\n dummy = head\n while dummy:\n node_list.append(dummy)\n dummy = dummy.next\n \n\n l, r = 0, len(node_list) - 1\n while r >= l:\n if l == r:\n # odd case\n node_list[l].next = None\n else:\n # even case\n if r == l+1:\n node_list[r].next = None\n else:\n node_list[l].next = node_list[r]\n node_list[r].next = node_list[l+1]\n l += 1\n r -= 1", "def insertionSortList(self, head: 'ListNode') -> 'ListNode':\n if not head:\n return head\n dummy = ListNode(-1)\n dummy.next = head\n remaining_list = head.next\n head.next = None\n while remaining_list:\n prev = dummy\n cur = dummy.next\n while cur and cur.val < remaining_list.val:\n prev = cur\n cur = cur.next\n \n temp = remaining_list.next\n remaining_list.next = prev.next\n prev.next = remaining_list\n remaining_list = temp\n \n return dummy.next", "def sort_0(l):\n l.sort()", "def sort(self):\n def get_fval(res):\n return res.fval if not np.isnan(res.fval) else np.inf\n\n self.list = sorted(self.list, key=get_fval)", "def reorderList(self, head: ListNode) -> None:\n if head is None:\n return head\n dummy_head = ListNode(-1)\n slow = head\n fast = head\n while fast.next and fast.next.next:\n fast = fast.next.next\n slow = slow.next\n start = slow.next\n slow.next = None\n pre = None\n cur = start\n while cur:\n next = cur.next\n cur.next = pre\n pre = cur\n cur = next\n head2 = pre\n cur = dummy_head\n cur1 = head\n cur2 = head2\n while cur1 or cur2:\n if cur1:\n cur.next = cur1\n cur1 = cur1.next\n cur = cur.next\n if cur2:\n cur.next = cur2\n cur2 = cur2.next\n cur = cur.next\n return dummy_head.next", "def sort_list(self,list_):\r\n list_.sort()", "def reorderList(self, head: ListNode) -> None:\n if head is None or head.next is None:\n return\n\n # middle\n slow,fast = head, head\n while fast.next is not None and fast.next.next is not None:\n slow = slow.next\n fast = fast.next.next\n\n # reverse\n fast = self.reverse(slow.next)\n slow.next = None\n slow = head\n\n\n # merge\n current = fast\n while fast is not None:\n current = slow.next\n slow.next = fast\n fast = fast.next\n slow.next.next = current\n slow = current", "def reorderList(self, head: ListNode) -> None:\n if not head or not head.next or not head.next.next:\n return\n def helper(first_node, last_last_node=None):\n if not last_last_node:\n last_last_node = head\n while last_last_node.next and last_last_node.next.next:\n last_last_node = last_last_node.next\n last_node = last_last_node.next\n if first_node == last_node:\n return\n if first_node.next == last_node:\n return\n first_node.next = last_node\n last_node.next = first_node.next\n helper(first_node.next, last_last_node)\n helper(head)", "def reorderList(self, head: ListNode) -> None:\n if not head: return\n left, right, stop = head, head, False\n \n def recurse(right):\n \n nonlocal left, stop\n if not right:\n return\n \n prev, right = right, right.next\n recurse(right)\n if left == prev or (prev and prev.next == left):\n stop = True\n if not stop:\n next_, left.next= left.next, prev\n left, prev.next = next_, next_\n\n\n recurse(right)\n left.next = None", "def reorderList(self, head: ListNode) -> None:\n length = 0\n curr = head\n node_dict = {}\n while curr is not None:\n node_dict[length] = curr\n curr = curr.next\n length += 1\n\n j = length - 1\n for i in range(length):\n if node_dict[i].next is None or node_dict[i].next == node_dict[j]:\n break\n i_next = node_dict[i].next\n j_next = node_dict[j].next\n node_dict[i].next = 
node_dict[j]\n node_dict[j].next = i_next\n node_dict[j - 1].next = j_next\n j -= 1", "def reorderList(self, head: ListNode) -> None:\r\n if head == None:\r\n return head\r\n\r\n temp = head\r\n count_size = 0\r\n\r\n while temp is not None:\r\n count_size+=1\r\n temp = temp.next\r\n\r\n temp = head\r\n head1 = None\r\n break_point = 0\r\n\r\n while temp is not None:\r\n break_point+=1\r\n\r\n if count_size % 2 == 0 and break_point == count_size // 2:\r\n break\r\n elif count_size % 2 == 1 and break_point == (count_size // 2)+1:\r\n break\r\n\r\n temp = temp.next\r\n\r\n\r\n head1 = temp.next\r\n temp.next = None\r\n\r\n head1 = reverse(head1)\r\n\r\n temp1 = head\r\n ptr , ptr1 = None,None\r\n\r\n while temp1 and head1:\r\n ptr = head1\r\n head1 = head1.next\r\n ptr1 = temp1.next\r\n temp1.next = ptr\r\n ptr.next = ptr1\r\n temp1 = temp1.next.next\r\n\r\n return head", "def reorderList(self, head: ListNode) -> None:\n if head:\n fast, slow = head, head\n while True:\n if fast.next is None or fast.next.next is None:\n break\n fast = fast.next.next\n slow = slow.next\n \n rev = slow.next\n pre = None\n while rev:\n next = rev.next\n rev.next = pre\n pre = rev\n rev = next\n slow.next = pre\n\n start = head\n while pre:\n next = start.next\n start.next = pre\n pre = pre.next\n if pre:\n start.next.next = next\n start = next\n if not pre:\n if next is slow:\n next.next = None\n start.next.next = next\n\n # return head", "def anythingSort(L):\n return internalSort(L, 0, len(L) - 1)", "def insertionSort(list):", "def reorderList(self, head: ListNode) -> None:\n if not head:\n return\n len = 1\n tmp = head\n while tmp.next:\n tmp = tmp.next\n len += 1\n if len <= 2:\n return\n tmp = head\n for _ in range(len//2):\n tmp = tmp.next\n nxt = tmp.next\n tmp.next = None\n while nxt.next:\n nnxt = nxt.next\n nxt.next = tmp\n tmp = nxt\n nxt = nnxt\n nxt.next = tmp\n tmp = head\n for i in range(len//2 - 1):\n tmp_next = tmp.next\n nxt_prev = nxt.next\n tmp.next = nxt\n nxt.next = tmp_next\n tmp = tmp_next\n nxt = nxt_prev\n tmp.next = nxt", "def reorderList(self, head: ListNode) -> None:\n if not head:\n return \n # Find the middle node\n slow = fast = head\n while fast and fast.next:\n slow = slow.next\n fast = fast.next.next\n \n # reverse the second half\n prev = None\n while slow:\n temp = slow\n slow = slow.next\n temp.next = prev\n prev = temp\n \n start = head\n while prev.next:\n start.next, start = prev, start.next\n prev.next, prev = start, prev.next", "def reorderList(self, head: Optional[ListNode]) -> None:\n prev, slow, fast = None, head, head\n\n while fast and fast.next:\n prev, slow, fast = slow, slow.next, fast.next.next\n \n if not prev: return head\n\n # reverse 2nd helf\n def reverse(root):\n node, nxt = root, root.next\n node.next = None\n\n while node and nxt:\n tmp = nxt.next\n nxt.next, node = node, nxt\n nxt = tmp\n return node\n\n # cut linklist and reverse 2nd half\n prev.next = None\n x = reverse(slow)\n\n dummyHead = ListNode(0)\n cur = dummyHead\n while head or x:\n if head: cur.next, cur, head = head, head, head.next\n if x: cur.next, cur, x = x, x, x.next\n\n return dummyHead.next", "def reorderList(self, head: ListNode) -> None:\n if not head or not head.next:\n return\n # Split the list\n first, second = head, head\n while first and first.next:\n first, second = first.next.next, second.next\n mid, p = second.next, second.next\n second.next = None\n # Reverse the second half\n while p and p.next:\n nxt = p.next\n p.next = nxt.next\n nxt.next = mid\n mid = nxt\n # 
Interweave\n p1, p2 = head, mid\n while p1 and p2:\n p1nxt, p2nxt = p1.next, p2.next\n p1.next, p2.next = p2, p1nxt\n p1, p2 = p1nxt, p2nxt", "def sorted(x) -> List:\n pass", "def reorderList(self, head: ListNode) -> None:\n if not head:\n return None\n\n def reverse(head):\n newnode = None\n cur = head\n while cur:\n nextnode = cur.next\n cur.next=newnode\n newnode = cur\n cur = nextnode\n return newnode\n\n def getmid(head):\n slow, fast = head, head\n while fast.next and fast.next.next:\n slow = slow.next\n fast = fast.next.next\n return slow\n\n def merge(l1, l2):\n while l1 and l2:\n l1_tmp = l1.next\n l2_tmp = l2.next\n\n l1.next = l2\n l1 = l1_tmp\n\n l2.next = l1\n l2 = l2_tmp\n ptr1 = head\n left_mid = getmid(head)\n mid = left_mid.next\n left_mid.next=None\n\n ptr2 = reverse(mid)\n merge(ptr1, ptr2)", "def sort_2(l):\n l.reverse()", "def mysort(lst: List[T], compare: Callable[[T, T], int]) -> List[T]:\n temp = lst\n switched = True\n while switched:\n switched = False\n for i in range(len(temp) - 1):\n if compare(temp[i], temp[i + 1]) == 1:\n temp[i], temp[i + 1] = temp[i + 1], temp[i]\n switched = True\n\n return temp", "def reorderList(self, head: ListNode) -> None:\n if not head:\n return\n\n # find the mid point\n slow = fast = head\n while fast and fast.next:\n slow = slow.next\n fast = fast.next.next\n\n # reverse the second half in-place\n # 4 -> 5 -> 6 = 6 -> 5 -> 4\n pre, node = None, slow\n while node:\n pre, node.next, node = node, pre, node.next\n\n # Merge in-place; Note : the last node of \"first\" and \"second\" are the same\n first, second = head, pre\n while second.next:\n first.next, first = second, first.next\n second.next, second = first, second.next\n return", "def reorderList(self, head: ListNode) -> None:\n if not head:\n return None\n slow = fast = head\n while True: # find the middle node\n if not fast.next or not fast.next.next:\n break\n fast = fast.next.next\n slow = slow.next\n head_1 = head # first half of linked list\n head_2 = self.rev(slow.next, None)[0] if slow.next else None # second half\n slow.next = None\n # build result with above two halves\n dummy = runner = ListNode(0)\n while head_2:\n runner.next = head_1 # order matters!!!\n head_1 = head_1.next\n runner = runner.next # runner at head_1\n runner.next = head_2 # point head_1's next to head_2\n head_2 = head_2.next\n runner = runner.next # runner at head_2\n if head_1: # first half may have 1 additional node\n runner.next = head_1\n head_1.next = None\n return dummy.next", "def reorderList(self, head: Optional[ListNode]) -> None:\n # Find the mid of the list\n slow, fast = head, head\n while fast and fast.next:\n slow = slow.next\n fast = fast.next.next\n \n # Reverse second half\n prev, curr = None, slow\n while curr:\n prev, curr.next, curr = curr, prev, curr.next\n \n # Splice together nodes\n list1, list2 = head, prev\n dummy = ListNode()\n curr = dummy\n toggle = 1\n while list1 and list2:\n if toggle == 1:\n curr.next = list1\n list1 = list1.next\n curr = curr.next\n else:\n curr.next = list2\n list2 = list2.next\n curr = curr.next\n toggle *= -1\n return dummy.next", "def reorderList(self, head: ListNode) -> None:\n nodes = []\n curNode = head\n while curNode:\n nodes.append(curNode)\n curNode = curNode.next\n\n if len(nodes) <= 2:\n return head\n\n startIdx, endIdx = 0, len(nodes)-1\n while startIdx < endIdx:\n nodes[startIdx].next = nodes[endIdx]\n nodes[endIdx].next = nodes[startIdx+1]\n startIdx += 1\n endIdx -= 1\n\n if startIdx == endIdx:\n nodes[startIdx].next = None\n 
else:\n nodes[endIdx+1].next = None\n\n return head", "def insertionSortList(self, head):\n # special case, list is empty or has only 1 node\n if not head or not head.next:\n return head\n\n tail = head # pointer to tail node of already sorted linked list\n curr = head.next\n while curr:\n if curr.val < tail.val:\n link = curr.next # save link to the next node\n # insert curr node in sorted part of the list\n head = self.insert_node(head, curr)\n tail.next = link\n curr = link\n else:\n tail = tail.next\n curr = curr.next\n return head", "def sort_3(l):\n l.sort(reverse=True)", "def reorderList(self, head: ListNode) -> None:\n def reverse(head):\n dummy = ListNode(0)\n dummy.next = head\n tail = dummy\n while head and head.next:\n tmp = head.next\n head.next = tmp.next\n tmp.next = tail.next\n tail.next = tmp\n return dummy.next\n \n if not head:\n return\n # ensure the first part has the same or one more node\n fast, slow = head.next, head\n while fast and fast.next:\n fast = fast.next.next\n slow = slow.next\n \n # reverse the second half\n node = reverse(slow.next)\n slow.next = None\n \n # combine head part and node part\n p = head\n while node:\n tmp = node.next\n node.next = p.next\n p.next = node\n p = node.next\n node = tmp", "def reorderList(self, head: ListNode) -> None:\n if not head or not head.next:\n return\n #The list was divided into two segments, \n #with prev pointing to the last node in the first,\n #cur to the head of the second\n prev,cur,front = None,head,head\n while front and front.next:\n prev = cur\n cur = cur.next\n front = front.next.next\n prev.next = None\n \n head2 = self.reverseList(cur)\n \n q = head\n while head2 and q:\n p = head2\n head2 = head2.next\n\n p.next = q.next\n q.next = p\n \n if p.next:\n q = p.next\n else:\n p.next = head2\n break", "def sort_list():\n fun_list = basic_list_exception.make_list()\n fun_list.sort()\n return fun_list", "def mySort(L):\n clear = False\n while not clear:\n clear = True\n for j in range(1, len(L)):\n if L[j-1] > L[j]:\n clear = False\n temp = L[j]\n L[j] = L[j-1]\n L[j-1] = temp", "def sorting_alg(self, my_list):\n for i in range(len(my_list)):\n for j in range(i+1, len(my_list)):\n if my_list[i] > my_list[j]:\n my_list[i], my_list[j] = my_list[j], my_list[i]\n #print(my_list)\n #sleep(1)\n return my_list", "def sort_by_default(self):\n self.data.sort()", "def __SortLists(self): \n\n \n AS=argsort(self.__NumList)\n\n self.__IndList=[self.__IndList[i] for i in AS]#list(self.__IndList[AS])\n self.__ObjList=[self.__ObjList[i] for i in AS]#list(self.__IndList[AS])\n self.__NumList=[self.__NumList[i] for i in AS]", "def sort(*, list : Union[List[Any], ConduitVariable], reverse : bool = False) -> None:\n list.sort(key = None, reverse = reverse)", "def mysort(lst: List[T], compare: Callable[[T, T], int]) -> List[T]:\n for i in range(1, len(lst)): #loops through each element starting at the second one\n for j in range(i, 0, -1): #loops through each element coming before i starting at i and going backwards\n if compare(lst[j], lst[j-1]) < 0: #checks to see if the previous element is smaller than the current (by saying <0 we keep the sort stable as well)\n lst[j], lst[j-1] = lst[j-1], lst[j] #if they are, we switch them\n else:\n break #if they are not, we know that the element is in its proper place\n return lst", "def gnomesort(self):\n # nothing to do if we're empty or singleton\n if len(self) < 2:\n return\n # start with second element, and always compare to the element before\n current = self.first.next\n while current 
is not None:\n # thus current must have a .prev\n # If this element is unsorted with the element before it, then\n if current.prev and current.value < current.prev.value:\n # swap this element with the element before it\n # using insert_after and pop_before is an easy way to handle first/last identities\n self.insert_after(current, self.pop_before(current))\n # and then check the new previous-element.\n else:\n # advance to next node (or None if this is the last node in the list, in which case we terminate)\n current = current.next", "def sort(self):\n self.fragment_list.sort()", "def sort(self, *args: Any, **kwargs: Any) -> BaseList:\n super().sort(*args, **kwargs)\n return self", "def sort(lst):\n \"*** YOUR CODE HERE ***\"\n if len(lst) <= 0:\n return []\n return [min(lst)] + sort(remove_first(lst, min(lst)))", "def s_sort(l):\r\n for x in range(len(l)): # while 1st index in range of length of list...\r\n min_i = x # set 1st index as current min index, to be referenced/compared against\r\n\r\n for y in range(x + 1, len(l)): # while 2nd index in range of 1st+1 & length of list...\r\n if l[min_i] > l[y]: # if 1st is greater than 2nd, set 2nd as new min index\r\n min_i = y\r\n\r\n l[x], l[min_i] = l[min_i], l[x] # swap new min with original reference\r", "def reorderList(head: ListNode) -> None:\n # Edge cases -> 0, 1, or 2 nodes in list\n if not head:\n return []\n if not head.next:\n return head\n if not head.next.next:\n return head\n # Build stack using nodes in order\n stack = []\n node = head\n L = 0 # Keep track of length\n while node:\n stack += [node]\n node = node.next\n L += 1\n # Add head and tail to reordered list\n left_node = head\n right_node = stack.pop()\n left_next = left_node.next\n left_node.next = right_node\n # Add more nodes to list\n k = 2 # k is number of nodes in new list\n while k < L:\n # Even index -> add from left\n if k % 2 == 0:\n left_node = left_next\n right_node.next = left_node\n last = left_node\n # Odd index -> add from right\n else:\n left_next = left_node.next\n right_node = stack.pop()\n left_node.next = right_node\n last = right_node\n k += 1\n # Set next field of last node to None\n last.next = None", "def sort(self, *pargs, **kwargs):\n if self._can_sort(*pargs, **kwargs):\n list.sort(self, *pargs, **kwargs)", "def _sorting(self, notsorted_list, predecessors):\n remaining_nodes = []\n sorted_part = []\n for nd in notsorted_list:\n if not predecessors[nd.name]:\n sorted_part.append(nd)\n else:\n remaining_nodes.append(nd)\n return sorted_part, remaining_nodes", "def sort(self, *args, **kargs):\n list.sort(self, *args, **kargs)\n self.emit('modified')", "def sortListWithHash(list,order_by,order_by_hash,default,desc):\n if order_by_hash.has_key(order_by):\n return sortList(list,order_by_hash[order_by],\"\",desc)\n else:\n return sortList(list,order_by_hash[default],\"\",desc)", "def buble_sort(lst):\n lst_sorted = copy.copy(lst)\n for i in range(len(lst_sorted)):\n for j in range(len(lst_sorted)):\n if j == len(lst_sorted) - 1:\n continue\n if lst_sorted[j][1] > lst_sorted[j + 1][1]:\n lst_sorted[j], lst_sorted[j+1] = lst_sorted[j+1], lst_sorted[j]\n\n return lst_sorted", "def reorder(l: List[Any]) -> List[Any]:\n sorted_list: List[Any] = list()\n sorted_list.append(l[0])\n for i in range(1, len(l)):\n index: int = 0\n while index < i and l[i] > sorted_list[index]:\n index += 1\n sorted_list.insert(index, l[i])\n return sorted_list", "def sortListWithHash(list,order_by,order_by_hash,default,desc):\n if order_by_hash.has_key(order_by):\n\treturn 
sortList(list,order_by_hash[order_by],\"\",desc)\n else:\n\treturn sortList(list,order_by_hash[default],\"\",desc)", "def __sort(self, _list, _index, desc, pop_first_element=False):\n if _index != 0:\n _list = [(x[_index], x) for x in _list]\n \n _list.sort()\n \n if desc:\n _list.reverse()\n\n if _index != 0 or pop_first_element: \n _list = [x[1] for x in _list]\n\n return _list", "def quick_sort(lst, first, last):\r\n if first < last:\r\n split_marker = split_list(lst, first, last)\r\n\r\n quick_sort(lst, split_marker + 1, last)\r\n quick_sort(lst, first, split_marker - 1)", "def sort_list(user_input):\n user_input.sort()\n return user_input # added a return statement for a cleaner looking main function", "def SortList(self, key: callable = str.lower):\n temp_list = self.Items\n temp_list.sort(key=key)\n # delete contents of present listbox\n self.delete(0, Tags.End.value)\n # load listbox with sorted data\n for item in temp_list:\n self.insert(Tags.End.value, item)", "def sort(self):\r\n self.list.sort(key=lambda x: ''.join(x))", "def reorderList(self, head: ListNode) -> None:\n\n def get_middle(starting_node) -> Tuple[ListNode, ListNode]:\n a1 = starting_node\n a2 = starting_node\n while a1.next is not None:\n prev_a1 = a1\n a1 = a1.next\n a2 = a2.next.next if a2.next is not None else None\n if a2 is None:\n return prev_a1, a1\n if a2.next is None:\n return a1, a1.next\n return a1, a1.next\n\n def reverse(starting_node: ListNode) -> ListNode:\n start = starting_node\n curr = starting_node\n while curr.next is not None:\n future = curr.next\n future_next = future.next\n future.next = curr\n curr.next = future_next\n start = future\n return start\n\n def join_linked_lists(a: ListNode, b: ListNode) -> ListNode:\n curr_a = a\n curr_b = b\n while curr_a is not None and curr_b is not None:\n next_a = curr_a.next\n curr_a.next = curr_b\n next_b = curr_b.next\n curr_b.next = next_a\n curr_b = next_b\n curr_a = next_a\n return a\n\n if head is not None:\n middle_node, next_to_middle_node = get_middle(head)\n middle_node.next = None\n print_linked_list(middle_node)\n print_linked_list(next_to_middle_node)\n if next_to_middle_node is not None:\n reversed_list_node = reverse(next_to_middle_node)\n else:\n reversed_list_node = None\n print_linked_list(reversed_list_node)\n print_linked_list(head)\n head = join_linked_lists(head, reversed_list_node)\n print_linked_list(head)", "def natsort(lst):\n lst.sort(key=natsort_key)", "def sort_4(l):\n l = list(set(l))\n l.sort()", "def sort(self,desc):\n\tself.__sort(\"\",\"\",desc)", "def __init__(self) -> None: \n SortedList.__init__(self)\n self.head = None", "def reorder( self ):\n self.sorted.sort(self.compareFunction)", "def _sort_nodes(cls: Type, lst: List[Dict[str, Any]],\n by: str = 'item_title'):\n assert type(lst) == list\n lst.sort(key=lambda n: n[by])\n for n in lst:\n if 'nodes' in n:\n cls._sort_nodes(n['nodes'], by)", "def sort(self):\n self.list.sort(key=lambda x: ''.join)", "def merge_sort(linked_list):\n\n if linked_list.size() == 1:\n return linked_list\n elif linked_list.head is None:\n return linked_list\n\n left_half, right_half = split(linked_list)\n left = merge_sort(left_half)\n right = merge_sort(right_half)\n\n return merge(left, right)", "def main():\n\n import random\n print( \"*** Initializing new list ... done. \" )\n print( \"*** Filling in 20 random values ... 
done.\" )\n\n l = []\n\n for i in range( 20 ):\n l.append( random.randint( 0, 100 ))\n\n print( \" ### Unsorted list: \" )\n print( l )\n\n print( \"\\n*** Sorting the list with Bubble Sort ... done.\" )\n bubbleSort( l )\n\n print( \" ### Sorted list: \")\n print( l )", "def sort_nicely(l):\n l.sort(key=alphanum_key)\n return l", "def sorting(self, presorted=None):\n self._sorted_nodes = []\n if presorted:\n notsorted_nodes = copy(presorted)\n else:\n notsorted_nodes = copy(self.nodes)\n predecessors = {key: copy(val) for (key, val) in self.predecessors.items()}\n\n # nodes that depends only on the self._nodes_wip should go first\n # soe remove them from the connections\n for nd_out in self._node_wip:\n for nd_in in self.successors[nd_out.name]:\n predecessors[nd_in.name].remove(nd_out)\n\n while notsorted_nodes:\n sorted_part, notsorted_nodes = self._sorting(notsorted_nodes, predecessors)\n self._sorted_nodes += sorted_part\n for nd_out in sorted_part:\n for nd_in in self.successors[nd_out.name]:\n predecessors[nd_in.name].remove(nd_out)", "def sorting(my_list):\n for indx in range(1,len(my_list)):\n i=indx\n while i>0:\n if my_list[i]<my_list[i-1]:\n temp=my_list[i-1]\n my_list[i-1]=my_list[i]\n my_list[i]=temp\n i=i-1\n return my_list", "def bogosort(to_sort):\n # Be sure to sort the list at each pass in the while loop to make it extra\n # inefficient!\n while sorted(to_sort) != to_sort:\n shuffle(to_sort)", "def shell_sort(a_list):\n sublist_count = len(a_list) // 2\n while sublist_count > 0:\n for start_position in range(sublist_count):\n a_list = insertion_sort(\n a_list,\n start=start_position,\n gap=sublist_count\n )\n sublist_count = sublist_count // 2\n return a_list", "def sortList(lst, reverse=False, key=None):\n return sorted(lst, key=key, reverse=reverse)", "def sort_by_assignments(peaklist, order=None, commented_at_end=False):\n anchors = peaklist.anchors\n anchored = tuple(i for anchor in anchors for i in anchor)\n unanchored = set(range(peaklist.dims)) - set(anchored)\n default_order = anchored + tuple(sorted(unanchored))\n order = order if order is not None else default_order\n peaklist.sort(key=lambda peak: tuple(peak[i] for i in order))\n if commented_at_end:\n peaklist.sort(key=lambda peak: peak.commented)\n return peaklist", "def tim_sort(li: Sequence) -> List:\n minrun = find_minrun(len(li))\n \n for start in range(0, len(li), minrun):\n # Note that insertion_sort sorts [left, right)\n end = min(start + minrun, len(li))\n insertion_sort(li, start, end)\n \n size = minrun\n while size < len(li):\n for left in range(0, len(li), 2 * size):\n # Since [left : left+size] and [left+size : left+2*size] have been sorted \n # (when size=minrun, these two have been sorted by insertion_sort; when \n # size is doubled, they are sorted by the previous loop), we can use merge.\n mid = min(left + size, len(li))\n right = min(left + 2 * size, len(li))\n merge(li, left, mid, right)\n size *= 2", "def buble_sort(l):\r\n for i in range(len(l)):\r\n for j in range(i+1, len(l)):\r\n if (l[j-1]>l[j]):\r\n l[j-1], l[j] = l[j], l[j-1]", "def testSort(self):\n numlist = [6,4.78,1.2,5]\n numlist.sort()\n self.assertEqual([1.2,4.78,5,6],numlist)\n \n strlist = [\"kgb\",\"mss\",\"cheka\"]\n strlist.sort()\n self.assertEqual([\"cheka\",\"kgb\",\"mss\"],strlist) \n \n # ------------ reverse sort\n numlist.sort(reverse = True)\n self.assertEqual([6,5,4.78,1.2],numlist)", "def sort(values, comp_func):\n\n \"\"\"\n 昇順\n comp_func = lambda a, b: a if a<b else b\n\n 降順\n comp_func = lambda a, b: a if 
a>b else b\n\n 偶数昇順、奇数昇順\n comp_func = lambda a, b: a if \\\n a % 2 == 0 and b % 2 == 1 else \\\n (b if b%2==0 and a%2==1 else (a if a<b else b))\n \"\"\"\n\n num = len(values)\n for i in range(0, num):\n tmp_value = values[i]\n tmp_index = i\n for j in range(i, num):\n if tmp_value != comp_func(values[j], tmp_value):\n tmp_index = j\n tmp_value = comp_func(values[j], tmp_value)\n values[tmp_index] = values[i]\n values[i] = tmp_value\n \"\"\"\n values.remove(tmp_value)\n values.insert(0, tmp_value)\n new_list.insert(0, tmp_value)\n \"\"\"\n print(values)\n\n return", "def insertionsort(A:list) -> \"void\":\n\tfor j in range(1, len(A)):\n\n\t\tkey = A[j]\n\t\ti = j - 1\n\n\t\twhile i >= 0 and A[i] > key:\n\t\t\tA[i+1] = A[i]\n\t\t\ti = i - 1\n\n\t\tA[i+1] = key", "def bubble_sort(first):\n # iterate len(lst) times\n for i in range(len(first)):\n\n # integrate [len(lst) - i - 1] times\n for j in range(len(first) - i - 1):\n\n # sort two number if not sorted\n if first[j] > first[j + 1]:\n # swap element at j with element at j + 1\n # and element ad j + 1 with element j\n first[j], first[j + 1] = first[j + 1], first[j]", "def natsort(lst: List[str]) -> None:\n lst.sort(key=natsort_key)", "def quick_sort(mylist):\n _inplace_quick_sort(mylist, 0, len(mylist)-1)", "def sortn(xs):\n return sorted(xs, key=sortnkey)", "def sort(self):\n tmp = list(zip(self.user_points, self.user_ids));\n tmp = sorted(tmp, reverse=True);\n self.user_points, self.user_ids = list(zip(*tmp));\n \n self.user_points = list(self.user_points);\n self.user_ids = list(self.user_ids);", "def sort_L3():\n for item in d_list:\n item.sort(key=operator.itemgetter(1))", "def reOrderListOfListByFirstMember(listOfList=None):\n\tfirstList = listOfList[0]\n\tx_ar = numpy.array(firstList, numpy.float)\n\t#sort x_ar and y_ar must be in the order of x_ar\n\tindexOfOrderList = numpy.argsort(x_ar)\n\treturnListOfList = []\n\tfor ls in listOfList:\n\t\tar = numpy.array(ls, numpy.float)\n\t\tar = ar[indexOfOrderList]\n\t\treturnListOfList.append(ar)\n\treturn PassingData(listOfList=returnListOfList)", "def merge_sort(alist):\n print(\"Splitting \", alist)\n # Temporary list to store sorted list\n work = [None] * len(alist)\n rec_merge_sort(work, start=0, end=len(alist)-1)", "def test05_students_list_sort_by_name(self):\n # get sorted list with function without None\n students_table = self.students_page.students_table()\n sorted_list_by_function = \\\n remove_none_from_list(sorted(students_table))\n print(sorted_list_by_function)\n\n # get sorted list with button without None\n sorted_list_by_button = \\\n remove_none_from_list(self.students_page.\n click_students_list_sort_by_name_button().\n students_table())\n print(sorted_list_by_button)\n self.assertEqual(sorted_list_by_button, sorted_list_by_function)", "def sort(self):\n self.chain_list.sort()\n for chain in self.chain_list:\n chain.sort()", "def custom_sort(vector:list)->list:\n if len(vector) <= 2:\n return vector\n\n else:\n mid = len(vector) // 2\n vector.insert(1, vector.pop(mid))\n i = 1\n for idx in range(2, len(vector)-1, 2):\n vector.insert(idx+1, vector.pop(mid+i))\n i +=1\n return vector", "def sort(List):\n if not isinstance(List, (list, tuple)):\n raise TypeError(\"Argument must be list or tuple\")\n List = list(List).copy()\n sorted = False\n iter = len(List) - 1\n while (sorted == False):\n sorted = True\n for i in range(iter):\n if List[i] > List[i+1]:\n List[i],List[i+1] = List[i+1],List[i]\n sorted = False\n iter -= 1\n\n return List", "def _sort(self):\n 
self.population.sort()\n self.population.reverse()", "def test_sort(self):\n a, b, c, d = Node('a'), Node('b'), Node('c'), Node('d')\n a | b | c\n a * 'foo' | 'bar' * c\n d | 'baz' * b\n nodes = topo_sort([a, d])\n self.assertEqual(set(nodes[:2]), set([a, d]))\n self.assertEqual(nodes[2:], [b, c])", "def test_sort_chain_multiple_reuse():\n data = [-10, 42, 8, 64, -6, 76, 48, 8, -30, 1, 11, 92, 37, 4]\n chain = None\n for item in data:\n chain = N.Node(item, chain)\n\n id_record = {}\n walker = chain\n while walker is not None:\n id_record[id(walker)] = walker.data\n walker = walker.next\n\n result = A8.sort_chain(chain)\n\n walker = result\n while walker is not None:\n assert id(walker) in id_record, \"sort_chain created new node\"\n assert id_record[id(walker)] == walker.data, \"sort_chain moved data value {} to new node\".format(walker.data)\n walker = walker.next", "def test_sorting(sort=selection_sort, num_items=20, max_value=50):\n # TODO: Repeat until all items are in sorted order\n # TODO: Take first unsorted item\n # TODO: Insert it in sorted order in front of items", "def sort(student_list):\n for i in range(len(student_list) - 1):\n for x in range(len(student_list) - 1):\n if student_list[x] > student_list[x + 1]:\n student_list[x], student_list[x + 1] = \\\n student_list[x + 1], student_list[x]", "def sort_nicely(l):\r\n\tl.sort(key=alphanum_key)", "def test_original_unchanged():\n first_list = [28, 3, 4, 10, 8]\n bubble_sort(first_list)\n assert first_list == [28, 3, 4, 10, 8]", "def sort_nicely(l):\n l.sort(key=alphanum_key)\n return l", "def listOrdering(self):\r\n index = 0\r\n while( index < len(self.sortedList)-1):\r\n if(self.sortedList[index][2] > self.sortedList[index+1][2]): # positions in wrong order\r\n self.sortedList[index], self.sortedList[index+1] = self.sortedList[index+1], self.sortedList[index] # switch\r\n if(self.sortedList[index][2] == self.sortedList[index+1][2]): # Position conflict\r\n if(self.sortedList[index][1] <= self.sortedList[index+1][1]): # Already ordered by id\r\n self.sortedList[index+1][2] += 1 # position altered for second rule\r\n else:\r\n self.sortedList[index][2] += 1\r\n self.sortedList[index], self.sortedList[index+1] = self.sortedList[index+1], self.sortedList[index] # switch\r\n index += 1", "def test_random_lst():\n from quick_sort import quick_sort\n lst_sort = sorted(rand_lst)\n assert quick_sort(rand_lst) == lst_sort" ]
[ "0.70741075", "0.67025137", "0.66831917", "0.66423726", "0.66087323", "0.6591983", "0.65739864", "0.65520513", "0.6516934", "0.64655", "0.6462724", "0.64343417", "0.64046", "0.6401858", "0.6391598", "0.6389351", "0.6371825", "0.6366405", "0.63644326", "0.63390523", "0.6335447", "0.6325062", "0.6299676", "0.6294862", "0.62930286", "0.6287484", "0.62606955", "0.6210209", "0.61935246", "0.6187419", "0.6184292", "0.61474395", "0.60788333", "0.60767734", "0.60766554", "0.60656047", "0.60319245", "0.6027255", "0.6023692", "0.60064334", "0.6005152", "0.6004284", "0.59930485", "0.5992093", "0.5980969", "0.5963747", "0.59633285", "0.5960955", "0.5956679", "0.59435195", "0.59412915", "0.5895934", "0.5866884", "0.586032", "0.5832246", "0.5824668", "0.5820458", "0.58193266", "0.5816856", "0.5799951", "0.5777275", "0.5770101", "0.57671136", "0.57625836", "0.57614744", "0.57586944", "0.5755593", "0.5751184", "0.57425517", "0.5734643", "0.57309073", "0.5724589", "0.5724355", "0.5721151", "0.57150906", "0.5713484", "0.57099646", "0.56897914", "0.5682975", "0.56803995", "0.5679951", "0.5672316", "0.56645507", "0.5661774", "0.56611836", "0.5658043", "0.56414145", "0.5641079", "0.5637212", "0.5632001", "0.56319976", "0.5630435", "0.56301004", "0.5626845", "0.5625689", "0.5621869", "0.5618499", "0.5617403", "0.5608106", "0.55997217" ]
0.6693736
2
List all registered posts
def get(self):
    return get_all_posts()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show_all_posts():\n post = Post.query.all()\n\n return render_template('all-posts.html', post=post)", "def get_posts(self):\n return self.blog_posts.all()", "def post_list(request):\n # Only show the posts that have been published\n posts = Post.objects.filter(date_published__isnull=False)\n return render(request,\n 'blog/post_list.html',\n {'posts': posts}\n )", "def posts_list(request):\n\n # recupera posts\n posts = Post.objects.select_related(\"owner\").filter(Q(publish_at__lte=now())).all()\n categorias = Categoria.objects.all()\n\n # prepara el contexto de la plantilla\n context = {\n 'post_objects': posts,\n 'categoria_objects': categorias\n }\n\n # renderiza y devuelve la plantilla\n return render(request, 'blogs/inicio.html', context)", "def get_posts(self): #return list of posts that are associated with this blog_id\n return Post.find_posts_for_blog_id(self.blog_id) #this will return a list of posts objects", "def post_list(request):\n\timage_post_list = list(ImagePost.objects.all())\n\tvideo_post_list = list(VideoPost.objects.all())\n\tall_post = image_post_list + video_post_list\n\treturn render(request,'devblog/post_list.html', {'posts':all_post})", "def all(request):\n\n posts = Post.objects.filter()\n\n print(\"made a query\")\n return render(request, 'posts/all.html', {'posts':posts})", "def get_posts(self):\n return Post.select().where (Post.user == self)", "def postList(posts):\n post_list = list()\n for post in posts:\n visible_to = list()\n visible = post.visibleTo.all()\n if visible:\n for author in visible:\n auth = \"{}/api/author/{}\".format(DOMAIN, author.id)\n visible_to.append(auth)\n\n comments = commentList(post)\n comment_url = \"{}/api/posts/{}/comments\".format(DOMAIN, post.id)\n post_dict = {'author': addAuthor(post.author), 'title': post.title, 'description': post.description,\n 'contentType': post.contentType, 'content': post.content, 'published': post.published,\n 'visibility': post.visibility, 'visibleTo': visible_to, 'unlisted': post.unlisted, 'id': post.id,\n 'comments': comments[:5], 'next': comment_url, 'count': len(comments),\n 'origin': \"{}/api/posts/{}\".format(DOMAIN, post.id),\n 'source': \"{}/api/posts/{}\".format(DOMAIN, post.id)}\n post_list.append(post_dict)\n return post_list", "def get_posts():\n url = app.config['POSTS_ENDPOINT']\n response = requests.get(url, params={})\n if response.status_code == 200:\n return parse_posts(response.json())\n raise RuntimeError('Error in retrieving posts.')", "def list_posts(request):\n if request.method == 'POST':\n category = request.POST.get('category', False)\n posts = Post.objects.select_related('author')\\\n .filter(category=category)\\\n .order_by('-modified')\n # import pdb; pdb.set_trace()\n return render(request, 'posts/index.html',\n {'posts': posts})\n\n posts = Post.objects.select_related('author').order_by('-modified')\n likes = Likes.objects.select_related('post')\n\n return render(request, 'posts/index.html',\n {'posts': posts})", "def show_posts():\n\n # set page as req.args['page'] coerced to int, or set as one if none is passed\n page = int(request.args.get('page', 1))\n\n # handle private AND public posts if user is logged in, only public if not\n if CURRENT_USER_KEY in session:\n posts = Post.query.order_by(Post.id.desc()).paginate(\n page=page, per_page=10, error_out=True)\n else:\n posts = Post.query.filter_by(is_private='f').order_by(Post.id.desc()).paginate(\n page=page, per_page=10, error_out=True)\n\n all_posts = [post.serialize() for post in posts.items]\n return 
jsonify(has_next=posts.has_next, posts=all_posts)", "def index(self):\n \n return self.view.render('index.html', {\"posts\"=posts})", "def posts_index():\n posts = Post.query.all()\n return render_template('posts.html', posts=posts, post=None)", "def all_query() -> list:\n data = []\n posts = Posts.query.all()\n for post in posts:\n x = {\n \"title\": post.title,\n \"body\": post.body,\n \"timestamp\": post.timestamp,\n \"id\": post.id,\n \"url\": make_url_from_title(post.title),\n }\n data.append(x)\n return data", "def get_posts(self):\r\n postList = []\r\n for tag in self.setting.imgurTags:\r\n try:\r\n req = requests.get('%s%s' % (self.setting.tagLink, tag), headers=self.setting.imgurHeaders)\r\n for post in req.json()['data']['items']:\r\n p = self.json_to_post(post, tag)\r\n if p is not None:\r\n postList.append(p)\r\n except Exception as e:\r\n self.logger.log(logger.LogLevel.CRITICAL, 'imgur.get_posts exception(%s): %s' % (tag, e))\r\n break\r\n return postList", "def get(self):\n\n self.render_posts()", "def get_posts(request):\n posts = Post.objects.order_by(\"created_date\")\n return render(request, \"blogposts.html\", {\"posts\": posts})", "def education_post_list(request):\n posts = EducationBlogPost.objects.filter(published_date__lte=timezone.now()\n ).order_by('-published_date')\n return render(request, \"education_center/education_blogposts.html\", {'posts': posts})", "def list_all(request):\n\n entries = BlogEntry.objects.all()\n data = {'entries': paginate_objects(request, entries),\n 'blog_info': get_blog_info(), 'action_str': 'All Blogs Shown'}\n\n return render_to_response('blog/list_entries.html', data,\n context_instance=get_rq(request))", "def get_posts(wp):\n from wordpress_xmlrpc.methods.posts import GetPosts\n\n all_posts = []\n\n offset = 0\n increment = 20\n while True:\n posts = wp.call(GetPosts({'number': increment, 'offset': offset, 'post_type': 'post'}))\n if len(posts) == 0:\n break # no more posts returned\n for post in posts:\n all_posts.append(post)\n\n offset = offset + increment\n\n return all_posts", "def getMyPosts():\n \n cur, user_id = initialise(3)\n cur.execute(\"SELECT username FROM users WHERE id = ?\", [user_id])\n name = cur.fetchall()[0][0]\n cur.execute(\"SELECT * FROM posts WHERE name = ?\", [name])\n posts = cur.fetchall()\n return posts", "def all_posts_list(request):\n #update is_expired in all posts\n update_posts_expiration()\n #put all posts into post\n post = Post.objects.all()\n #create serializer with the posts\n serializer = ViewPostSerializer(post, many=True)\n #return serializer view\n return Response(serializer.data)", "def get_all_posts_json():\n\n posts = [\n {\n \"postId\": post.post_id,\n \"postPrompt\" : crud.get_prompt_by_prompt_id(post.prompt_id),\n \"postText\": post.post_text,\n \"location\": post.user_facing_location,\n \"dateCreated\": post.created_at,\n \"toneQualities\": crud.get_tone_qualities_by_post_id(post.post_id),\n }\n for post in crud.get_post_by_user_id(session['user_id'])\n ]\n\n return jsonify(posts)", "def get_queryset(self):\n try:\n posts = Hashtag.filter_posts_by_hashtag(self.kwargs['hashtag_name'])\n except Hashtag.DoesNotExist:\n raise Http404('Hashtag \"%s\" does not exist' % self.kwargs['hashtag_name'])\n return posts", "def get_all_posts(self):\n cur = self.conn.cursor()\n\n query = 'SELECT blog.blog_id as id, blog.title as title, ' \\\n 'blog.subtitle as subtitle, ' \\\n 'blog.content as content, blog.date as date, ' \\\n 'author.name as author ' \\\n 'FROM blog, author ' \\\n 'WHERE 
blog.author_id = author.author_id ' \\\n 'ORDER BY blog_id DESC '\n\n posts = []\n cur.execute(query)\n\n for row in cur.fetchall():\n posts.append(dict(row))\n\n return posts", "def register_posts(app):\n blog = Blog(app)\n for docname, posts in getattr(app.env, \"ablog_posts\", {}).items():\n for postinfo in posts:\n blog.register(docname, postinfo)", "def get_queryset(self):\n return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')", "def get_queryset(self):\n return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')", "def all_blogs(request):\n\n posts = Post.objects.all()\n\n context = {\n 'posts': posts\n }\n\n return render(request, 'blog/blog.html', context)", "def list():\n\n page_limit = app.config['PAGINATION_LIMIT']\n page = request.args.get('page') if 'page' in request.args else 1\n per_page = request.args.get('per_page') if 'per_page' in request.args else page_limit\n\n # TODO: Can be done in much more elegant way\n try:\n page = int(page)\n except:\n page = 1\n\n try:\n per_page = int(per_page)\n except:\n per_page = page_limit\n if per_page > page_limit:\n per_page = page_limit\n\n # Get all rows and order by published datetime and paginate by page count and per_page\n posts = YTSearch.query.order_by(desc(YTSearch.published_at)) \\\n .paginate(page, per_page, error_out=True)\n\n # Get JSON data from list of objects\n result = [i.serialize() for i in posts.items]\n return jsonify({'data': result, 'has_next': posts.has_next, 'next_page': posts.next_num,\n 'has_prev': posts.has_prev, 'prev_page': posts.prev_num, 'length': len(result)}), 200", "def post_list(request, topic_id, pagination_id):\n\ttry:\n\t\ttopic = Topic.objects.get(id=topic_id)\n\texcept Topic.DoesNotExist:\n\t\treturn HttpResponseRedirect('/forum/')\n\tif topic.is_locked:\n\t\topened = False\n\telse:\n\t\topened = True\n\treturn object_list(\n\t\trequest,\n\t\ttopic.post_set.all().order_by('post_date'),\n\t\tpaginate_by = 10,\n\t\tpage = pagination_id,\n\t\textra_context = {\n\t\t\t'topic_id':topic_id,\n\t\t\t'opened': opened,\n\t\t\t'topic': topic.topic_name,\n\t\t\t'forum_id': topic.topic_forum.id,\n\t\t\t'forum_name': topic.topic_forum,\n\t\t\t'perms': list_perms(request),\n\t\t\t'current_user': str(request.user)},\n\t\ttemplate_name = 'myghtyboard/post_list.html')", "def get(self, request, *args, **kwargs):\n my_normal_post_lists = NormalPosts.objects.filter(uploded_by=request.user.normalprofile).order_by(\"-id\")\n return render(request, self.template_name, {\n 'my_normal_post_lists': my_normal_post_lists,\n })", "def remotePostList(host, posts, public):\n post_list = list()\n posts = posts.get('posts')\n for post in posts:\n author = remoteAddAuthor(post.get('author'))\n title = post.get('title')\n description = post.get('description')\n contentType = post.get('contentType')\n content = post.get('content')\n published = utc.localize(datetime.strptime(post.get('published'), '%Y-%m-%dT%H:%M:%S.%fZ'))\n visibility = post.get('visibility')\n unlisted = post.get('unlisted')\n id = post.get('id')\n origin = post.get('source')\n comments = remoteCommentList(post)\n count = post.get('count')\n next = \"{}/api/posts/{}/comments\".format(DOMAIN, id)\n if host.endswith(\"/\"):\n host = host[:-1]\n source = \"{}/posts/{}\".format(host, post.get('id'))\n\n post_dict = {'author': author, 'title': title, 'description': description,\n 'contentType': contentType, 'content': content, 'published': published,\n 'visibility': visibility, 'unlisted': unlisted, 
'id': id,\n 'comments': comments, 'origin': origin,\n 'source': source, 'count': count, 'next': next}\n post_list.append(post_dict)\n return post_list", "def post_list(request):\n if request.method == 'GET':\n posts = Post.objects.all()\n serializer = PostSerializer(posts, many=True)\n return JSONResponse(serializer.data)\n\n elif request.method == 'POST':\n data = JSONParser().parse(request)\n serializer = PostSerializer(data=data)\n if serializer.is_valid():\n serializer.save()\n return JSONResponse(serializer.data, status=201)\n return JSONResponse(serializer.errors, status=400)", "def posts_get():\n \n\n # Get and filter the posts from the database\n songs = session.query(models.Song).all()\n \n # Convert the posts to JSON and return a response\n data = json.dumps([song.as_dictionary() for song in songs])\n return Response(data, 200, mimetype=\"application/json\")", "def get_queryset(self):\n return Post.objects.filter(pub_date__lte=timezone.now())", "def get_queryset(self):\n return Post.objects.filter(pub_date__lte=timezone.now())", "def index():\n\n posts = models.Post.select()\n return render_template('index.html', posts=posts)", "def get_posts():\n db = psycopg2.connect(\"dbname=forum\")\n c = db.cursor()\n query = \"SELECT content, time FROM posts ORDER BY time DESC\"\n c.execute(query)\n rows = c.fetchall()\n POSTS = rows\n db.close()\n return POSTS", "def get(self, request):\n #all_posts = Post.objects.all()\n #self.context['posts'] = all_posts\n return render(request, self.template, self.context)", "def __list_all_users(self):\n\n usernames_dict = get_data.get_usernames_dict()\n if len(usernames_dict) > 0:\n first_str = 'user'\n second_str = 'posts scraped'\n descriptor = '{:<40} {}'\n print('')\n print(descriptor.format(first_str, second_str))\n print(descriptor.format(len(first_str) * '-', len(second_str) * '-'))\n for number, username in usernames_dict.items():\n space_str = ' ' if len(str(number)) > 1 else ' '\n first = '[' + space_str + str(number) + '] ' + username\n second = str(get_data.get_user_post_count(username))\n print(descriptor.format(first, second))\n else:\n print('no users found in the database')", "def test_blogposts_get_all(self):\r\n user = self.create_users()[1]\r\n app = self.create_app(info=None)\r\n app.owner = user\r\n blogpost = Blogpost(owner=user, app=app, title='thisisatitle', body='body')\r\n db.session.add_all([user, app, blogpost])\r\n db.session.commit()\r\n url = \"/app/%s/blog\" % app.short_name\r\n\r\n # As anonymous\r\n res = self.app.get(url, follow_redirects=True)\r\n assert res.status_code == 200, res.status_code\r\n assert 'thisisatitle' in res.data\r\n\r\n # As authenticated\r\n self.register()\r\n res = self.app.get(url, follow_redirects=True)\r\n assert res.status_code == 200, res.status_code\r\n assert 'thisisatitle' in res.data", "def get_queryset(self):\r\n\r\n user = get_object_or_404(User, username=self.kwargs.get('username'))\r\n return Post.objects.filter(author=user).order_by('-date_posted')", "def index():\r\n db = get_db()\r\n cur = db.cursor()\r\n cur.execute(\r\n 'SELECT p.id, title, body, created, author_id, username'\r\n ' FROM novel.post p JOIN novel.user u ON p.author_id = u.id'\r\n ' ORDER BY created DESC'\r\n )\r\n posts = cur.fetchall()\r\n return render_template('novel/index.html', posts=posts)", "def list_posts(params, db_conn):\n\n skip = params.get('skip') or 0\n limit = params.get('limit') or 10\n params = omit(params, ('skip', 'limit',))\n query = (r.table(post_schema['tablename'])\n .filter(params)\n 
.order_by(r.asc('created'))\n .skip(skip)\n .limit(limit))\n return list(query.run(db_conn))", "def posts_for_feed():\n user_id = session.get('user_id')\n friend_posts = Post.query.join(Friend, db.and_(Post.user_id == Friend.user_2,\n Friend.active == True)).outerjoin(Comment, db.and_(Comment.post_id == Post.post_id,\n Comment.active == True)).filter(Friend.user_1 == user_id,\n Post.active == True).order_by(Post.post_id.desc()).all()\n\n post_list = []\n for post in friend_posts:\n post_list.append(post.to_dict_for_json())\n\n resp = make_response(jsonify(post_list), 200)\n return resp", "def get(self):\n # posts = db.GqlQuery(\"select * from Post order by created desc limit 10\")\n posts = greetings = Post.all().order('-created')\n self.render('front.html', posts = posts)", "def list(self):\n return self.objects.all()", "def index():\n return flask.render_template('index.html', posts=Post.query.all())", "def published_posts(self) -> Type[QuerySet]:\n return Post.objects.filter(published__lt=timezone.now()).order_by('-published')", "def get_queryset(self):\n return Post.objects.filter(published_date__isnull=True).order_by('created_date')", "def test_return_list_of_posts(self):\n self.create_new_user()\n self.create_new_posts()\n response = self.c.get('/wall/',\n content_type=\"application/json\")\n\n assert 200 == response.status_code\n assert 2 == len(response.json()['data']['posts'])\n assert response.json()['data']['posts'][0]['message'].startswith('All animals are equal')\n assert response.json()['data']['posts'][1]['message'].startswith('War is peace')", "def get_all_posts(request, show_only=None):\n if(show_only == None):\n posts = Post.objects.filter(published_date__lte=timezone.now()).order_by('-is_important', '-published_date')\n title = \"All Posts\"\n else:\n posts = Post.objects.filter(post_type__exact=show_only.name).filter(published_date__lte=timezone.now()).order_by('-is_important', '-published_date')\n title = show_only.name\n return render(request, \"posts.html\", {\"posts\": posts, \"title\": title})", "def feed(request):\n followers = request.user.profile.followers.values_list('pk', flat=True)\n posts = Post.objects.filter(author_id__in=followers)\n\n return render(request,\n 'posts/feed.html',\n {'posts': posts})", "def get_posts(self, published=False) -> Type[QuerySet]:\n categories = self.get_descendant_categories()\n posts = Post.objects.filter(categories__in=categories)\n if published:\n posts = posts.filter(published__lte=timezone.now())\n return posts", "def get_queryset(self):\n\t\treturn Post.objects.order_by('-pub_date')[:5]", "def posts(self, limit=100, all=False):\n source, edge = self.id, \"feed\"\n return lazygen(Post, source, edge,\n limit=limit, get_all=all)", "def posts_get():\n title_like = request.args.get(\"title_like\")\n body_like = request.args.get(\"body_like\")\n\n posts = session.query(Post)\n if title_like:\n if body_like:\n posts = posts.filter(\n Post.title.contains(title_like), Post.body.contains(body_like))\n else:\n posts = posts.filter(Post.title.contains(title_like))\n posts = posts.all()\n\n data = json.dumps([post.as_dictionary() for post in posts])\n return Response(data, 200, mimetype=\"application/json\")", "def get_queryset(self):\n return Post.objects.order_by('-posted')[:5]", "def retrieves_posts_on_home(self: User, from_id: Optional[str]) -> List[Post]:\n def _filter_post(p):\n return sees_post(self, p, context_home_or_profile=True)\n\n return get_page(\n mongoengine_model=Post,\n extra_query_args={},\n 
extra_filter_func=_filter_post,\n from_id=from_id,\n page_count=HomePostsPageSize\n )", "def get_queryset(self):\n return Post.objects.order_by('-post_date')[:5]", "def get_queryset(self):\n user: User = self.request.user\n following_users = user.profile.following.all()\n return Post.objects.filter(author__in=following_users).order_by('created')", "def all_title() -> list:\n return [i[\"title\"] for i in Blogs_Manager.TablePost.all_query()]", "def get_user_posts(request):\n if request.method == \"POST\":\n token = request.data.get('token')\n post_id = request.data.get('post_id')\n type_ = request.data.get('type')\n\n if Token.objects.filter(key=token).exists():\n token = get_object_or_404(Token, key=token)\n if post_id == -1:\n posts = Post.objects.all().order_by(\"-date\")[:PAGE_OFFSET]\n elif type_ == 'old':\n posts = Post.objects.filter(pk__lt=post_id).order_by(\"-date\")[:PAGE_OFFSET]\n else: # 'new'\n posts = reversed(Post.objects.filter(pk__gt=post_id).order_by(\"date\")[:PAGE_OFFSET])\n\n serializer = PostSerializer(posts, many=True, context={'user_id': token.user_id})\n return Response({\"success\": 29,\n \"post\": serializer.data})\n else:\n return Response({\"error\": 17})", "def list(self, request):\n # Get all Post records from the database\n posts = Post.objects.all()\n \n \n\n # Support filtering Posts by type\n # http://localhost:8000/Posts?type=1\n #\n # That URL will retrieve all tabletop Posts\n \n\n category = self.request.query_params.get('category', None)\n if category is not None:\n posts = posts.filter(category__id=category)\n \n user = RareUser.objects.get(user=request.auth.user)\n active = self.request.query_params.get('active', None)\n my_subscriptions=Subscription.objects.filter(follower_id=user.id)\n # print(my_subscriptions)\n \n if active is not None:\n print(\"my post navbar is being clicked\")\n # 1)get the posts where the user on the post equals the id on the user\n\n # 2)get the subscriptions where the follower on the subscription equals the id on the user\n # 3)get the posts where the user on the post equals the author in the subscription\n\n home_page_posts=[]\n\n followed_users=RareUser.objects.filter(rareusers_author__follower=user)\n for author in followed_users:\n subscribed_post=list(posts.filter(user=author))\n home_page_posts=home_page_posts+subscribed_post\n\n only_my_posts = list(posts.filter(user__id=user.id))\n home_page_posts=home_page_posts+only_my_posts\n\n # for subscription in my_subscriptions:\n \n # subscribed_post=posts.filter(user__id=subscription.author_id)\n # # my_list.append(subscribed_post)\n # # print(subscribed_post)\n # # my_list.append(only_my_posts)\n \n posts=home_page_posts\n \n \n users = self.request.query_params.get('user', None)\n if users is not None:\n \n posts = posts.filter(user__id=user)\n \n\n title = self.request.query_params.get('title', None)\n if title is not None:\n posts = posts.filter(title__contains=title)\n\n # subscribers=Subscription.objects.filter(follower=user.id)\n # for subscriber in subscribers:\n # subscriptionPosts=posts.filter(user=subscriber.author)\n # posts.append(subscriptionPosts)\n\n for post in posts:\n if post.user == user:\n post.my_post =True\n else:\n post.my_post =False\n\n \n\n serializer = PostSerializer(\n posts, many=True, context={'request': request})\n\n return Response(serializer.data)", "def get_queryset(self):\n return Post.objects.filter(\n user_id=self.kwargs['user_id'], status='published', visibility='public'\n )", "def iter_all_posts(self, limit=None): # new\n feed = 
self.get_feed(limit=999999)\n posts = feed[\"threads\"]\n if limit:\n posts = posts[:limit]\n for post in posts:\n yield post", "def users_posts():\n\n user_id = session.get('user_id')\n posts = Post.query.outerjoin(Comment, db.and_(Comment.post_id == Post.post_id, \n Comment.active == True)).filter(Post.user_id == user_id,\n Post.active == True).order_by(Post.post_id.desc()).all()\n post_list = []\n for post in posts:\n post_list.append(post.to_dict_for_json())\n\n resp = make_response(jsonify(post_list), 200)\n\n return resp", "def show_posts(page):\n if not session.get('logged_in'):\n posts = Post.query.filter_by(visible=True)\n else:\n posts = Post.query\n pagination = posts.order_by(Post.datetime.desc()).paginate(page=page, \n per_page=current_app.config['POSTS_PER_PAGE'])\n if not pagination.total: flash(\"No posts so far\")\n return render_template('posts.html', pagination=pagination,\n endpoint_func=lambda x: url_for('main.show_posts', page=x))", "def render_posts(self, **params):\n\n if \"user_posts\" in params:\n posts = params['user_posts']\n else:\n posts = Post.get_all()\n\n rendered_posts = \"\"\n for post in posts:\n rendered_posts += self.render_post(post, **params)\n\n self.render(\"blog/blog.html\", rendered_posts=rendered_posts)", "def fetch_feed_list(self, **args):\n return self.fetch(\"/feedlist\", **args)", "def list(request):\n return EntryView.__index(request)", "def getPosts():\n\n cur, user_id = initialise(3)\n cur.execute(\"SELECT username FROM users WHERE id = ?\", [user_id])\n name = cur.fetchall()[0][0]\n cur.execute(\"SELECT * FROM posts WHERE name IN (SELECT following FROM followers WHERE user = ?) OR name = ?\", (name, name))\n posts = cur.fetchall()\n return posts", "def test_get_all_posts(self):\n self.login_client('test_user', 'testing')\n # hit the API endpoint\n response = self.client.get(\n reverse(\"post-list-create\")\n )\n # fetch the data from db\n expected = Post.objects.all()\n serialized = PostSerializerSchema(expected, many=True)\n self.assertEqual(response.data, serialized.data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def show_all():\n\n # Request details\n req_cat = None\n req_auth = None\n req_tags = None\n search_term = None\n order = None\n\n # If there is no request, list posts by recency\n if len(request.args) == 0:\n posts = Post.query.order_by('created DESC').all()\n else:\n # Otherwise, get posts that fit requests\n search_term = \"%\" + (request.args.get('q') or \"\") + \"%\"\n\n\n categories = []\n req_cat = request.args.get('c')\n category_list = (req_cat.split(',')\n if req_cat is not None \n else ([cat.id for cat in Category.query.all()]))\n \n for category in category_list:\n categories.append(Post.category_id == category)\n\n category_filter = or_(*categories)\n\n authors = []\n req_auth = request.args.get('a')\n author_list = (req_auth.split(',')\n if req_auth is not None\n else ([user.id for user in User.query.all()]))\n for author in author_list:\n authors.append(Post.author_id == author)\n\n author_filter = or_(*authors)\n\n # Generate list of tags to look for in relationship table.\n req_tags = request.args.get('t')\n tag_list = ([int(tag_id) for tag_id in req_tags.split(',')]\n if req_tags is not None\n else ([tag.id for tag in Tag.query.all()]))\n tags = Post.tags.any(Tag.id.in_(tag_list))\n\n order = request.args.get('order') or 'created DESC'\n\n \"\"\"\n To be clear, this query looks for anything like the search term\n inside of the title or content of all posts and narrows it down\n to the 
selected authors, the selected categories, and the selected tags.\n \"\"\"\n posts = Post.query.join(Category).join(User).filter(\n or_(Post.title.like(search_term),\n Post.gfm_content.like(search_term)),\n author_filter, category_filter,\n tags).order_by(order).all()\n\n form = SearchForm()\n if form.validate_on_submit():\n args = '?q=' + form.query.data\n if valid_args(form.category.data):\n args += '&c=' + ilist_to_string(form.category.data)\n if valid_args(form.author.data):\n args += '&a=' + ilist_to_string(form.author.data)\n if valid_args(form.tags.data):\n args += '&t=' + ilist_to_string(form.tags.data)\n args += '&order=' + form.order_by.data\n\n return redirect(url_for('articles.show_all') + args)\n\n page = int(request.args.get('page')) if request.args.get('page') else 1\n pagination = Pagination(posts, per_page=4, total=len(posts),\n page=page)\n return render_template('articles/articles.html', posts=posts,\n form=form, query=search_term, cats=req_cat,\n authors=req_auth, tags=req_tags, order=order,\n pagination=pagination)", "def index(request):\n from ..models.post import Post\n collection = Post.objects.all()\n # import pdb;pdb.set_trace()\n return render(request,'index.html',{\"collection\":collection})", "def get(self):\n\n page = self.request.get('page')\n if page:\n page = int(page)\n else:\n page = 1\n limit = 10\n offset = limit * (page - 1)\n\n my_posts = Post.all().order('-created').filter('user =', self.user)\n\n number_posts = my_posts.count()\n total_pages = number_posts / limit\n if number_posts % limit:\n total_pages += 1\n\n posts = my_posts.fetch(limit=limit, offset=offset)\n self.render('mypost.html', posts=posts, page=page,\n total_pages=total_pages, user=self.user)", "def post_list_view(request, *args, **kwargs):\n qs = Post.objects.all()\n posts_list = [{\"id\": x.id, \"content\": x.content, \"likes\": random.randint(0, 120), \"reposts\": random.randint(0, 10)} for x in qs] #{\"id\": x.id, \"content\": x.content, \"likes\": random.randint(0, 120), \"reposts\": random.randint(0, 10) }/x.serialize()\n data = {\n \"isUser\": False,\n \"response\": posts_list\n }\n return JsonResponse(data) #, save=False", "def get(self, request):\n\n # recupera posts\n posts = Post.objects.filter(owner__in=request.user.owned_blogs.all())\n\n # prepara el contexto de la plantilla\n context = {\n 'post_objects': posts\n }\n\n # renderiza y devuelve la plantilla\n return render(request, 'blogs/inicio.html', context)", "def get_all_posts(self, *fields):\n if fields:\n posts = self.collection.find(projection=fields)\n else:\n posts = self.collection.find()\n\n for post in posts.sort('created_datetime', -1):\n yield BlogPost(\n title=post['title'],\n content=post['content'],\n created_datetime=post['created_datetime']\n )", "def get_posts(url):\r\n feed = feedparser.parse(url)\r\n return feed.entries", "def get_news(request):\n return get_all_posts(request, PostType.NEWS)", "def get_all():\n return PushManager.query.all()", "def all_id() -> list:\n return [str(i[\"id\"]) for i in Blogs_Manager.TablePost.all_query()]", "def get_posts():\n\n error_on_unauthorized()\n \n posts = Post.query.order_by(Post.id)\n total_num = posts.count()\n\n if total_num == 0:\n return jsonify(total=0, uploads=[])\n\n try:\n count = int(request.args.get('max', total_num))\n page = int(request.args.get('page', 1))\n origin = request.args.get('origin', None)\n\n if count <= 0 or page <= 0:\n raise APIError(422, \"Query parameters out of range\")\n\n if origin is not None:\n posts = posts.filter(User.origin 
== origin)\n\n begin = (page - 1) * count\n end = min(begin + count, total_num)\n \n return jsonify(total=total_num, posts=[p.to_dict() for p in posts.all()[begin:end]]), 200\n except ValueError:\n raise APIError(422, \"Invalid query parameter\")", "def list_all(request):\n companies = Company.objects.order_by('-created')\n context = dict(companies=companies)\n return render(request, 'companies/all.html', context)", "def list(self):\n url = self._resource_name\n return self._get(url)", "def list_notifications():\n token = request.args.get('token')\n user = User.query.filter_by(token=token).first()\n\n if user is None:\n return jsonify({\"error\": \"Access Denied!\"})\n\n # Filter Posts so the user doesn't have to filter it\n notifications = Notifications.query.filter_by(user_id=user.id).order_by(desc('created'))\n result = notification_schema.dump(notifications)\n\n # Notifications have been read delete them\n toDelete = Notifications.query.filter_by(user_id=user.id)\n toDelete.delete()\n\n return jsonify({\n \"notifications\": result\n })", "def get_publishers(self):", "def get(self):\n return get_all_blogs()", "def get(self):\n\n page = self.request.get('page')\n if page:\n page = int(page)\n else:\n page = 1\n limit = 10\n offset = limit * (page - 1)\n posts = Post.all().order('-created').fetch(limit=limit, offset=offset)\n number_posts = Post.all().count()\n total_pages = number_posts / limit\n if number_posts % limit:\n total_pages += 1\n\n self.render('home.html', posts=posts, page=page,\n total_pages=total_pages, user=self.user)", "def index(request, archive=False):\n context = {'archive':archive}\n posts = Post.objects.all()\n if not archive:\n posts = posts[:10]\n context['posts'] = posts\n if request.user.is_authenticated():\n #These are the new news items the logged in user has\n context['new_posts'] = NewBlog.objects.filter(user=request.user)\n return render(request, 'blog/index.html', context)", "def get_queryset(self):\n return models.BlogPost.objects.filter(blog__slug=self.kwargs['slug'])", "def get_public_posts(server_posts):\n public_list = server_posts\n servers = Server.objects.all()\n\n for server in servers:\n if server.username and server.password:\n host = server.hostname\n if not host.endswith(\"/\"):\n host = host + \"/\"\n server_api = \"{}posts\".format(host)\n try:\n s = requests.Session()\n # https://stackoverflow.com/questions/15431044/can-i-set-max-retries-for-requests-request\n retries = Retry(total=5,\n backoff_factor=0.1,\n status_forcelist=[500, 502, 503, 504])\n\n s.mount('http://', HTTPAdapter(max_retries=retries))\n s.mount('https://', HTTPAdapter(max_retries=retries))\n\n r = s.get(server_api, auth=(server.username, server.password))\n\n if r.status_code == 200:\n posts = remotePostList(server.hostname, r.json(), public_list)\n public_list.extend(posts)\n public_list = sorted(public_list, key=lambda k: k['published'], reverse=True)\n public_list = [next(v) for k, v in groupby(public_list, lambda d: d[\"id\"])]\n\n except:\n print('error')\n return public_list", "def add(self, posts):\n li_html = []\n for post in posts:\n li_html.append(\n u'<li><a href=\"{route}\">{title}</a></li>'.format(\n route=post.route, title=post.title))\n self._blog_list = u'\\n'.join(li_html)\n self._posts = posts", "def recent_posts(self):\n\n try:\n jsondoc = json.load(urllib.urlopen(\"http://reddit.com/user/%s.json\" % self.username))\n except:\n raise self.DoesNotExist\n \n posts = []\n for item in jsondoc['data']['children']:\n if item['kind'] == 't1':\n 
posts.append(Comment(item['data']))\n elif item['kind'] == 't3':\n posts.append(item['data'])\n\n return posts", "def full_listing(request, urlname):\n\tif request.user.is_authenticated():\n\t\tblog = Blog.qa_objects.get(urlname=urlname)\n\t\tposts = BlogEntry.qa_objects.filter(blog=blog, posting_time__lte=datetime.now()).order_by('-posting_time')\n\telse:\n\t\tblog = Blog.objects.get(urlname=urlname)\n\t\tposts = BlogEntry.objects.filter(blog=blog).order_by('-posting_time')\n\treturn render_to_response('blogs/full.html', {'blog': blog, 'posts': posts}, context_instance=RequestContext(request))", "def __list_all_tags(self):\n\n tags_dict = get_data.get_tagnames_dict()\n if len(tags_dict) > 0:\n first_str = 'tag'\n second_str = 'top posts scraped'\n third_str = 'recent posts scraped'\n descriptor = '{:<40} {:<20} {}'\n print('')\n print(descriptor.format(first_str, second_str, third_str))\n print(descriptor.format(len(first_str) * '-', len(second_str) * '-',\n len(third_str) * '-'))\n for number, tag in tags_dict.items():\n space_str = ' ' if len(str(number)) > 1 else ' '\n first = '[' + space_str + str(number) + '] ' + tag\n second = str(get_data.get_top_tag_post_count(tag))\n third = str(get_data.get_recent_tag_post_count(tag))\n print(descriptor.format(first, second, third))\n else:\n print('no tags found in the database')", "def get_blogs(request):\n return get_all_posts(request, PostType.BLOG)" ]
[ "0.743275", "0.69572014", "0.687439", "0.6832434", "0.67154205", "0.66296536", "0.66215444", "0.65834826", "0.6543941", "0.6530415", "0.6451439", "0.64244354", "0.6422649", "0.6408737", "0.6408558", "0.6405169", "0.6393117", "0.6386442", "0.6368651", "0.6358445", "0.63418925", "0.6328895", "0.63281816", "0.63130015", "0.6294807", "0.62801933", "0.62683195", "0.6266921", "0.6266921", "0.6259066", "0.6245503", "0.62385386", "0.6230189", "0.61913216", "0.61885405", "0.61696625", "0.61276966", "0.61276966", "0.6058252", "0.6054871", "0.6050726", "0.6045753", "0.60263556", "0.6019704", "0.6009141", "0.6002301", "0.6000548", "0.5999846", "0.5995954", "0.59719825", "0.59673554", "0.5964904", "0.59578025", "0.5956705", "0.5953553", "0.5942998", "0.59379697", "0.5919568", "0.590834", "0.5907306", "0.58966243", "0.5894047", "0.5871388", "0.58710206", "0.5865472", "0.5847209", "0.58245945", "0.58213127", "0.58151126", "0.5805343", "0.5797188", "0.5796157", "0.57848597", "0.5779979", "0.57688123", "0.5768193", "0.5768086", "0.57418436", "0.5711639", "0.56962967", "0.5683434", "0.5673999", "0.56718105", "0.56548595", "0.5641007", "0.5626305", "0.5623217", "0.5609766", "0.5608667", "0.56066436", "0.5599536", "0.5599317", "0.55908465", "0.55857533", "0.55807847", "0.5566857", "0.5563187", "0.55609286", "0.5558763", "0.5535342" ]
0.7320014
1
Creates a new post
def post(self):
    data = request.json
    return save_new_post(data=data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_post():\n\n #Get prompt id\n prompt_id = request.form.get('prompt_id')\n\n # Get post text\n post_text = request.form.get('user_post')\n\n # Create post timestamp\n created_at = datetime.now()\n user_facing_date = created_at.strftime(\"%B %d, %Y\")\n\n # Save post and related data to database\n post = crud.create_post(session['user_id'], prompt_id, post_text, session['lat'], session['lng'], session['user_facing_location'], created_at)\n\n return render_template('post_data.html', post=post, user_facing_date=user_facing_date)", "def post(self, post_id=None):\n\n if post_id:\n abort(400)\n else:\n args = parsers.post_post_parser.parse_args(strict=True)\n\n new_post = Post(args['title'])\n new_post.text = args['text']\n # new_post.user = user\n\n if args['tags']:\n for item in args['tags']:\n tag = Tag.query.filter_by(name=item).first()\n # If the tag already exist, append.\n if tag:\n new_post.tags.append(tag)\n # If the tag not exist, create the new one.\n # Will be write into DB with session do.\n else:\n new_tag = Tag(item)\n new_post.tags.append(new_tag)\n db.session.add(new_post)\n db.session.commit()\n return (new_post.id, 201)", "def create_a_post():\n subj = create_subject()\n post = Post.create(subject=subj, title=\"A great title\", body=\"Just a great day!\")\n post.save()\n return post", "def post(self):\n title = self.request.get(\"title\")\n body = self.request.get(\"body\")\n\n if title and body:\n\n # create a new Post object and store it in the database\n post = Post(\n title=title,\n body=body\n )\n post.put()\n\n # get the id of the new post, so we can render the post's page (via the permalink)\n id = post.key().id()\n self.redirect(\"/blog/%s\" % id)\n else:\n error = \"we need both a title and a body!\"\n #self.render_form(title, body, error)\n self.render(\"newpost.html\", title, body, error)", "def post():\n\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(title=form.title.data,\n date=form.date.data,\n time_spent=form.time_spent.data,\n details=form.details.data,\n remember=form.remember.data)\n return redirect(url_for('index'))\n return render_template('new.html', form=form)", "def create_post(category, author, name, content, status):\n return Post.objects.create(category=category, author=author, name=name, content=content, status=status)", "def post_create(request):\n\tform = PostForm(request.POST or None, request.FILES or None)\n\tif request.POST:\n\t\tif form.is_valid():\n\t\t\tinstance = form.save(commit=False)\n\t\t\tinstance.user = request.user\n\t\t\tinstance.save()\n\t\t\tmessages.success(request, \"Post created!\")\n\t\t\treturn HttpResponseRedirect(instance.get_absolute_url())\n\t\telse:\n\t\t\tmessages.error(request, \"Sorry! 
Something went wrong.\", extra_tags=\"\")\n\tcontext = {\n\t\t'title': \"Create Post\",\n\t\t'form' : form,\n\t}\n\treturn render(request, 'post/create.html', context)", "def create_post(bid):\n form = PostForm(request.form)\n if request.method == 'POST':\n if form.validate():\n DB.session.add(\n Post(\n bid,\n current_user.uid,\n form.name.data,\n form.desc.data))\n DB.session.commit()\n flash('Post ({}) successfully created!'.format(form.name.data))\n else:\n flash(constants.DEFAULT_SUBMISSION_ERR)\n return redirect(request.referrer)", "def create_post(user_id):\n\n user = User.query.get_or_404(user_id)\n title = request.form['title']\n content = request.form['content']\n tag_ids = [int(num) for num in request.form.getlist(\"tags\")]\n tags = Tag.query.filter(Tag.id.in_(tag_ids)).all()\n \n new_post = Post(title=title, content=content, user=user, tags=tags)\n db.session.add(new_post)\n db.session.commit()\n\n return redirect(f\"/users/{user_id}\")", "def add_post(request):\n if 'form.submitted' in request.params:\n title = request.params['title']\n name = title_to_name(title)\n\n if not name or DBSession.query(Post).filter(Post.name==name).count():\n # this should be a popup ajaxy box\n return Response(\"Name %s is in use, choose a different title\" % name, content_type='text/plain', status_int=500)\n\n body = request.params['body']\n post = Post(title, body, name)\n DBSession.add(post)\n return HTTPFound(location = request.route_url('view_post', postname=name))\n\n save_url = request.route_url('new_post')\n post = Post('')\n return environment_factory(post=post, save_url=save_url)", "def new_post(mkp_form, request):\n newpost = Posts()\n newpost.init()\n newpost.authorid = int(request.user.id)\n newpost.title = mkp_form.cleaned_data['title']\n newpost.name = mkp_form.cleaned_data['short_title'] # 缩略名\n newpost.cover = mkp_form.cleaned_data['cover_url']\n newpost.introduction = mkp_form.cleaned_data['introduction']\n newpost.content = js_resize_img(mkp_form.cleaned_data['content'])\n newpost.status = Status.objects.get(id=2) # id为2是已发布的文章,默认为已发布,后面再改\n tagids = mkp_form.cleaned_data['tags']\n if len(tagids) != 0:\n for tagid in tagids:\n tagid = int(tagid)\n tag = Tags.objects.get(id=tagid)\n newpost.tags.add(tag)\n threadtypeid = mkp_form.cleaned_data['threadtypeid']\n newpost.threadtypeid = ThreadTypes.objects.get(id=threadtypeid)\n if mkp_form.cleaned_data['commentnotshow'] != '':\n newpost.comment_status = False\n else:\n newpost.comment_status = True\n return newpost", "def create_post(request):\n if request.method == 'POST':\n title = request.POST['title']\n content = request.POST['content']\n user_id = request.POST['author_id']\n category = request.POST['category']\n\n slug = \"-\".join(list(map(lambda word: word.lower(), title.split())))\n author = User.objects.get(id=int(user_id))\n\n # save info in models\n post = Post()\n post.author = author\n post.category = category\n post.title = title\n post.content = content\n post.slug = slug\n post.save()\n return redirect('post')\n\n return render(request, 'posts/create_post.html')", "def test_create_post(self):\n with self.client:\n result = self.client.post('/users/spike-test/posts', data={\n \"post-title\": \"autotest title\",\n \"post-content\": \"autotest content for blog post testing\"\n }, follow_redirects=True)\n self.assertEqual(result.status_code, 200)\n self.assertIn(b'autotest title', result.data)", "def save_post(self, post):\n return self.collection.insert_one(post.serialize())", "async def create_post(self, community: 
Community, post_id) -> w_Post:\n post_url = self._api_communities_url + str(community.id) + '/posts/' + str(post_id)\n async with self.web_session.get(post_url, headers=self._headers) as resp:\n if self.check_status(resp.status, post_url):\n data = await resp.json()\n return (create_post_objects([data], community, new=True))[0]", "def new_post():\n form = PostForm()\n if form.validate_on_submit():\n post = Post(pub_date=datetime.date.today())\n post.title = form.title.data\n post.content = form.content.data\n post.slug = slugify(post.title)\n db.session.add(post)\n db.session.commit()\n return flask.redirect(flask.url_for(\n 'view_post',\n year=post.pub_date.year,\n month=post.pub_date.month,\n day=post.pub_date.day,\n slug=post.slug\n ))\n return flask.render_template('new.html', form=form)", "def new_post(self, content):\n return self.proxy.wp.newPost(self.blog_id, self.username, self.password,\n content)", "def create(thing):\n fields = {}\n errors = []\n\n for col in thing.cols:\n new[col.field_name] = request.form.get(col.field_name)\n if col.required and not new[col.field_name]:\n errors.append('%s cannot be empty' % col.human_name)\n\n if errors:\n for e in errors:\n flash(e)\n add_template_variable('thing', thing)\n add_template_variable('fields', fields)\n return my_render_template('generic/create_post.html')\n\n # insert into database\n\n db = get_db()\n cursor = db.cursor()\n\n # create the two strings we use in the query\n field_names = \"'\" + \"', '\".join(thing.field_names) + \"'\"\n question_marks = \", \".join(map(lambda x: '?', thing.field_names.count() ))\n\n cursor.execute(\"insert into posts (%s) values (%s)\" % (field_names, question_marks), (title, body))\n db.commit()\n new_id = cursor.lastrowid\n\n # show new post to the user\n flash(\"You made a new %s\" % thing.human_name)\n return redirect(url_for('show_one', id_=new_id))", "def create():\n if request.method == 'POST':\n title = request.form['title']\n body = request.form['body']\n error = None\n\n if not title:\n error = 'Title is required.'\n\n if error is not None:\n flash(error)\n else:\n db = get_db()\n db.execute(\n 'INSERT INTO post (title, body, author_id)'\n ' VALUES (?, ?, ?)',\n (title, body, g.user['id'])\n )\n db.commit()\n return redirect(url_for('blog.index'))\n\n return render_template('blog/create.html')", "def post(self):\n\n title = self.request.get(\"title\")\n blogPost = self.request.get(\"blogPost\")\n author = self.request.cookies.get('name')\n\n if title and blogPost:\n\n bp = Blogposts(parent=blog_key(), title=title,\n blogPost=blogPost, author=check_secure_val(author))\n\n bp.put()\n\n self.redirect('/%s' % str(bp.key.integer_id()))\n else:\n error = \"Please submit both a title and a blogpost!\"\n self.render(\"newpost.html\", title=title,\n blogPost=blogPost, error=error)", "def post(self, request):\n\n # crear el formulario con los datos del post\n form = PostForm(request.POST)\n\n if form.is_valid():\n #crea el post\n post = form.save()\n\n #generar mensaje de exito\n msg = \"Post creado con éxito\"\n form = PostForm()\n else:\n msg = \"Ha ocurrido un error al guardar el post\" \\\n\n\n # renderiza la plantilla con el formulario\n context = {\n \"form\": form,\n \"msg\": msg\n }\n\n # renderiza y devuelve la plantilla\n return render(request, 'blogs/new-post.html', context)", "def create_post():\r\n\r\n # Check for and reject empty username or whinge\r\n if not request.values.get(\"username\") or not request.values.get(\"whinge\"):\r\n print(\"Ignoring request to with empty username 
or whinge\")\r\n else:\r\n # Form data ok; add to DB\r\n con = get_db()\r\n con.execute(\"INSERT INTO posts (submitter,content,ts) VALUES (?,?,?);\",\r\n (\r\n request.values.get(\"username\"), # form field username -> DB column submitter\r\n request.values.get(\"whinge\"), # form field whinge -> DB column content\r\n time.time()\r\n )\r\n )\r\n con.commit()\r\n con.close()\r\n \r\n # TODO: Handle possibility of failed INSERT\r\n\r\n # Send them back to the main page\r\n return redirect(url_for(\"display_top\"))", "def add_new_post(user_id):\n\n title = request.form.get('title')\n content = request.form.get('content')\n\n new_post = Post(\n title=title, content=content, created_at='11-11-2011', user_id=user_id)\n\n db.session.add(new_post)\n db.session.commit()\n flash(f'New post added: {title}')\n\n return redirect(f'/users/{user_id}')", "def posts_create(request):\n if request.method == 'POST':\n form = PostForm(request.POST, request.FILES)\n if form.is_valid():\n form.save()\n return redirect('feed')\n\n else:\n form = PostForm()\n\n return render(\n request=request,\n template_name='posts/new.html',\n context={\n 'form': form,\n 'user': request.user,\n 'perfil': request.user.perfil\n }\n )", "def test_post_creation_success(self):\n url = reverse('post-list', args=[self.topic1.url_name])\n payload = {\n 'author': self.user1.id,\n 'title': 'Creating a post',\n 'content': 'Rich content 4',\n }\n self.client.credentials(\n HTTP_AUTHORIZATION = 'Token ' + self.user1.auth_token.key\n )\n response = self.client.post(url, payload)\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n new_post = Post.objects.filter(\n author=self.user1,\n title=payload.get('title'),\n content=payload.get('content'),\n topic=self.topic1\n )\n self.assertTrue(new_post.exists())", "def test_creating_new_post(self):\n\n form_data = {\"meal-time\": \"2020-02-25 08:00:00\", \n \"meal-setting\": \"At home!\", \"TEB\": \"Some thoughts..\",\n \"hunger\": 2, \"fullness\": 8, \"satisfaction\": 5,\n \"meal-notes\": \"Some notes.\"}\n \n create_new_post(1, \"/static/images/uploads/2.jpg\", form_data)\n\n post = Post.query.get(3)\n\n self.assertIsInstance(post, Post)\n self.assertEqual(post.meal_setting, \"At home!\")", "def post(self, body):\n return self.objects.create(body)", "def test_createPost(self):\n\t\tself.client.force_authenticate(user=User.objects.get(id=1))\n\t\turl = \"/posts/\"\n\t\tdata = {\n\t\t\t'text' : 'Vivaldi',\n\t\t\t'group': 3\n\n\t\t}\n\t\tresponse = self.client.post(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_201_CREATED)\n\t\tself.assertEqual(response.data[\"id\"], 4)\n\t\tself.assertEqual(response.data[\"text\"], 'Vivaldi')\n\t\tself.assertEqual(Post.objects.count(), 4)", "def posts_post():\n data = request.json\n\n try:\n validate(data, post_schema)\n except ValidationError as error:\n data = {\"message\": error.message}\n return Response(json.dumps(data), 422, mimetype=\"application/json\")\n\n post = Post(title=data[\"title\"], body=data[\"body\"])\n session.add(post)\n session.commit()\n\n data = json.dumps(post.as_dictionary())\n headers = {\"Location\": url_for(\"post_get\", id=post.id)}\n\n return Response(data, 201, headers=headers, mimetype=\"application/json\")", "def test_create_post():\n post = Post(\"Test\", \"Test Content\")\n\n assert post.title == \"Test\"\n assert post.content == \"Test Content\"", "def post(self):\n\n subject = self.request.get('subject')\n content = self.request.get('content')\n\n have_errors = False\n\n if 
not subject:\n error_subject = \"Please write down the subject\"\n have_errors = True\n if not content:\n error_content = \"Content is required\"\n have_errors = True\n\n if have_errors:\n self.render(\"newpost.html\",\n subject=subject,\n content=content,\n error_subject=error_subject,\n error_content=error_content,\n user=self.user)\n else:\n post = Post(parent=blog_key(),\n subject=subject,\n content=content,\n user=self.user)\n post.put()\n self.redirect('/blog/%s' % str(post.key().id()))", "def post(self):\n post_title = self.request.get(\"post_title\")\n post_content = self.request.get(\"post_content\")\n param_list = dict(post_title=post_title, post_content=post_content)\n any_error = False\n\n if not post_title:\n param_list['title_error'] = \"Title is missing!\"\n any_error = True\n if not post_content:\n param_list['content_error'] = \"Content is missing!\"\n any_error = True\n\n if any_error:\n self.render(\"blog/addpost.html\", **param_list)\n else:\n p = Post.add_post(post_title, post_content, self.user)\n self.redirect('/blog/%s' % str(p.key().id()))", "def handler(event, _context):\n model = PostModel()\n post_id = model.create(**json.loads(event['body']))\n return dump_result({'post_id': post_id}, status_code=201)", "def create():\r\n if request.method == 'POST':\r\n title = request.form['title']\r\n body = request.form['body']\r\n error = None\r\n db = get_db()\r\n cur = db.cursor()\r\n\r\n cur.execute('SELECT title FROM novel.post WHERE title = %s', title)\r\n newTitle = cur.fetchone()\r\n\r\n if not title:\r\n error = 'Title is required.'\r\n\r\n if newTitle and newTitle['title'] == title:\r\n error = 'Title is repeated.'\r\n\r\n if error is not None:\r\n flash(error)\r\n else:\r\n db = get_db()\r\n db.cursor().execute(\r\n 'INSERT INTO novel.post (title, body, author_id) VALUES (\"{0}\", \"{1}\", \"{2}\")'\r\n .format(title, body, g.user['id'])\r\n )\r\n db.commit()\r\n return redirect(url_for('novel.index'))\r\n\r\n return render_template('novel/create.html')", "def post(self, req):\n error_messages = []\n success_message = ''\n\n # Creamos owner y se lo pasamos al form con un objeto pre-establecido\n post_with_owner = Post()\n post_with_owner.owner = req.user\n post_with_owner.blog = Blog.objects.filter(owner=req.user)[0]\n\n form = PostCreateForm(req.POST, instance=post_with_owner)\n if form.is_valid():\n\n new_post = form.save()\n form = PostCreateForm()\n success_message = u'Post guardado con éxito! 
'\n success_message += u'<a href=\"{0}\">'.format(reverse('post_detail', args=[req.user.username, new_post.pk]))\n success_message += u'(ver post)</a>'\n else:\n error_messages.append(u'Formulario incompleto.')\n\n context = {\n 'form': form,\n 'success_message': success_message\n }\n return render(req, 'posts/new_post.html', context)", "def post(self):\n subject = self.request.get('subject')\n post_content = self.request.get('post_content')\n submit = self.request.get('submit')\n cancel = self.request.get('cancel')\n user = self.get_active_user()\n created_by = int(user.key().id())\n post_id = self.request.get('post_id')\n\n if not user:\n self.redirect('/login')\n if post_id:\n post = Posts.get_by_id(int(post_id))\n else:\n post = None\n\n if cancel == \"cancel\":\n self.redirect('/%s' % str(post.key().id()))\n return\n if (post and post.submitter_id == user.key().id()) or not post:\n if submit == \"submit\" and subject and post_content:\n if post:\n post.subject = subject\n post.content = post_content\n post.put()\n else:\n post = Posts(subject=subject,\n content=post_content,\n submitter_id=created_by)\n post.put()\n self.redirect('/%s' % str(post.key().id()))\n else:\n self.render_newpage(user=user,\n subject=subject,\n post_content=post_content,\n error=\"\"\"Please provide both a subject and a\n post!\"\"\")\n else:\n self.redirect('/login')", "def create(cls, headline, text, blog):\n post = cls()\n post.headline = headline\n post.text = text\n post.blog = blog\n post.posted_date = timezone.now()\n try:\n post.save()\n return post\n except(ValueError, IntegrityError, OperationalError):\n return None", "def add_new_post(request):\n\n token = request.data.get('token')\n if Token.objects.filter(key=request.data[\"token\"]).exists():\n text = request.data.get('text', '')\n image = request.data.get('image')\n video = request.data.get('video')\n\n if len(request.data[\"text\"]) < 1:\n return Response({\"error\": 21})\n elif len(request.data[\"text\"]) > 10000:\n return Response({\"error\": 22})\n\n token = get_object_or_404(Token, key=token)\n post = Post.objects.create(permission=request.data[\"permission\"],\n author_id=token.user_id,\n text=request.data[\"text\"])\n if image:\n image_data = b64decode(image)\n post.image = ContentFile(image_data, \"post.png\")\n\n if video:\n video_data = b64decode(request.data[\"video\"])\n post.video = ContentFile(video_data, \"post.mov\")\n post.save()\n\n if \"hashtags\" in request.data and len(request.data[\"hashtags\"]) > 0:\n for hashtag in request.data[\"hashtags\"]:\n PostHashtag.objects.create(post=post,\n hashtag=hashtag)\n\n serializer = PostSerializer(post, context={'user_id': token.user_id})\n return Response({\"success\": 23,\n \"post\": serializer.data})\n else:\n return Response({\"error\": 17})", "def post(self):\n data = request.json\n return create_new_blog(data=data)", "def add_post():\n\tt_id = db.survey.insert(\n\t\tquestion = request.vars.question,\n\t\tuser_email = request.vars.email,\n\t\tuser_name = get_user_name_from_email(request.vars.email),\n\t\topt1 = request.vars.opt1,\n\t\topt2 = request.vars.opt2,\n\t\topt3 = request.vars.opt3,\n\t\topt4 = request.vars.opt4,\n\t\t#created_on_human = humanize.naturaltime(datetime.datetime.utcnow()),\n\n\t)\n\tt = db.survey(t_id)\n\treturn response.json(dict(post=t))", "def create_post(user_id):\n if CURRENT_USER_KEY not in session or session[CURRENT_USER_KEY] != user_id:\n raise Unauthorized\n\n user = User.query.get_or_404(user_id)\n\n form = PostForm()\n form.muscles.choices = [(m.id, 
m.name) for m in Muscle.query.all()]\n form.equipment.choices = [(e.id, e.name) for e in Equipment.query.all()]\n # import pdb\n # pdb.set_trace()\n if form.validate_on_submit():\n title = form.title.data\n details = form.details.data\n is_private = form.is_private.data\n muscles = form.muscles.data\n equipment = form.equipment.data\n post = Post(title=title, details=form.details.data,\n is_private=form.is_private.data, user_id=user_id)\n db.session.add(post)\n db.session.commit()\n\n # create join table additions\n muscles_to_add = []\n equipment_to_add = []\n for muscle in muscles:\n muscle_post = PostMuscle(post_id=post.id, muscle_id=muscle)\n muscles_to_add.append(muscle_post)\n for choice in equipment:\n equipment_post = PostEquipment(\n post_id=post.id, equipment_id=choice)\n equipment_to_add.append(equipment_post)\n db.session.add_all(muscles_to_add + equipment_to_add)\n db.session.commit()\n flash('New post created!', 'success')\n return redirect(url_for('show_user_profile', user_id=user_id))\n return render_template('add_post.html', form=form, user=user)", "def postCreate(post):\n post_list = list()\n comments = commentList(post)\n comment_url = \"{}/api/posts/{}/comments\".format(DOMAIN, post.id)\n visible_to = list()\n visible = post.visibleTo.all()\n if visible:\n for author in visible:\n auth = \"{}/api/author/{}\".format(DOMAIN, author.id)\n visible_to.append(auth)\n\n # visible_to = list(post.visibleTo)\n post_dict = {'author': addAuthor(post.author), 'title': post.title, 'description': post.description,\n 'contentType': post.contentType, 'content': post.content, 'published': post.published,\n 'visibility': post.visibility, 'visibleTo': visible_to, 'unlisted': post.unlisted, 'id': post.id,\n 'comments': comments[:5], 'next': comment_url, 'count': len(comments),\n 'source': \"{}/api/posts/{}\".format(DOMAIN, post.id),\n 'origin': \"{}/api/posts/{}\".format(DOMAIN, post.id)}\n post_list.append(post_dict)\n return post_list", "def add_post(user_id):\n\n title = request.form['title']\n content = request.form['content']\n tags = request.form.getlist('tag')\n user = User.query.get_or_404(user_id)\n\n if not title or not content:\n flash(\"Please enter title and content.\")\n return redirect(f\"/users/{user.id}/posts/new\")\n\n post = Post(title=title, content=content, user=user)\n\n if tags:\n for tag in tags:\n post.tags.append(Tag.query.filter(Tag.name==tag).one())\n\n db.session.add(post)\n db.session.commit()\n\n user = User.query.get_or_404(user_id)\n\n return redirect(f\"/users/{user_id}\")", "def post(self):\n subject = self.request.get('subject')\n content = self.request.get('content')\n\n # if user enter good subject and content, redirect them to new post page\n if subject and content:\n p = Post(parent = blog_key(), subject = subject, content = content)\n p.put() # store the post element into database\n self.redirect('/blog/%s' % str(p.key().id()))\n # otherwise, render an error page \n else:\n error = \"subject and content, please!\"\n self.render(\"newpost.html\", subject=subject, content=content, error=error)", "def post(self):\n current_user = self.authenticate_user()\n\n if not current_user:\n self.redirect(\"/login\")\n else:\n content = self.request.get(\"content\")\n title = self.request.get(\"subject\")\n\n if not content or not title:\n self.render_front(title, content, \"We need both a title and content\")\n else:\n post = Post(title=title, content=content, user=current_user.key)\n post.put()\n\n current_user.posts.append(post.key)\n current_user.put()\n\n 
self.redirect(\"/post/\" + str(post.key.id()))", "def post(self):\n data = request.json\n create_entry(data)\n return None, 201", "def remotePostCreate(host, post):\n post = post.get('posts')[0]\n author = remoteAddAuthor(post.get('author'))\n title = post.get('title')\n description = post.get('description')\n contentType = post.get('contentType')\n content = post.get('content')\n published = utc.localize(datetime.strptime(post.get('published'), '%Y-%m-%dT%H:%M:%S.%fZ'))\n visibility = post.get('visibility')\n unlisted = post.get('unlisted')\n id = post.get('id')\n origin = post.get('origin')\n count = post.get('count')\n comments = remoteCommentList(post)\n source = \"{}/api/posts/{}\".format(DOMAIN, post.get('id'))\n\n post_dict = {'author': author, 'title': title, 'description': description,\n 'contentType': contentType, 'content': content, 'published': published,\n 'visibility': visibility, 'unlisted': unlisted, 'id': id,\n 'comments': comments, 'origin': origin, 'count': count,\n 'source': source}\n return post_dict", "def create_post(session: Session) -> Post:\n try:\n admin_user = session.query(User).filter(User.username == \"toddthebod\").first()\n post = Post(\n author_id=admin_user.id,\n slug=\"fake-post-slug\",\n title=\"Fake Post Title\",\n summary=\"A fake post to have some fake comments.\",\n feature_image=\"https://hackersandslackers-cdn.storage.googleapis.com/2021/01/[email protected]\",\n body=\"Cheese slices monterey jack cauliflower cheese dolcelatte cheese and wine fromage frais rubber cheese gouda. Rubber cheese cheese and wine cheeseburger cheesy grin paneer paneer taleggio caerphilly. Edam mozzarella.\",\n )\n session.add(admin_user) # Add the user\n session.commit() # Commit the change\n LOGGER.success(f\"Created post {post} published by user {admin_user}\")\n return post\n except IntegrityError as e:\n LOGGER.error(e.orig)", "def api_add_post(request, opening_post_id):\n\n opening_post = get_object_or_404(Post, id=opening_post_id)\n\n status = STATUS_OK\n errors = []\n\n if request.method == 'POST':\n form = PostForm(request.POST, request.FILES,\n error_class=PlainErrorList)\n form.session = request.session\n\n #if form.need_to_ban:\n # # Ban user because he is suspected to be a bot\n # _ban_current_user(request)\n # status = STATUS_ERROR\n if form.is_valid():\n ThreadView().new_post(request, form, opening_post,\n html_response=False)\n else:\n status = STATUS_ERROR\n errors = form.as_json_errors()\n\n response = {\n 'status': status,\n 'errors': errors,\n }\n\n return HttpResponse(content=json.dumps(response))", "def post(request):\n if request.method == \"POST\":\n post = Post()\n post.content = request.POST['content']\n post.author = request.user\n post.save()\n return HttpResponseRedirect(reverse(\"index\"))", "def post(self):\n teacher = self.request.get(\"teacher\")\n student = self.request.get(\"student\")\n lessondate = self.request.get(\"lessondate\")\n reason = self.request.get(\"reason\")\n comment = self.request.get(\"comment\")\n\n if teacher and student and lessondate and reason:\n\n # create a new Post object and store it in the database !!!!!!!!!!!!!!!\n loglesson = Teacher(\n teacher=teacher,\n student=student,\n lessondate=lessondate,\n reason = reason,\n comment = comment)\n loglesson.put()\n\n # get the id of the new post, so we can render the post's page (via the permalink)\n id = loglesson.key().id()\n self.redirect(\"/loglesson/%s\" % id)\n else:\n error = \"Please include a teacher, student, lesson date, reason and comment!\"\n 
self.render_form(teacher, student, lessondate,reason, comment, error)", "def community_post_create_view(request):\n task = \"Create New\"\n form = AddEditPostForm() # An unbound form\n\n if request.method == 'POST': # If the form has been submitted...\n form = AddEditPostForm(request.POST, request.FILES) # A form bound to the POST data\n if form.is_valid(): # All validation rules pass\n post = form.save(commit=False) # Create a new object from the form, but don't save it to the database\n post.author = request.user # Set the author to the current user\n post.save() # Save the object to the database\n slug_str = \"%s %s\" % (post.title, post.date_posted) # Create a slug from the title and date\n post.slug = slugify(slug_str) # Create the slug\n post.save() # Save the object to the database\n return redirect('community-home') # Redirect to the home page\n\n context = { # Pass the variables to the template\n 'task': task,\n 'form': form,\n }\n return render(request,\n 'pages/patient-community/community-create-update-post.html',\n context) # render the patient community create post page", "def add_new_post(user_id):\n user = User.query.get_or_404(user_id)\n tag_ids = [int(num) for num in request.form.getlist(\"tags\")]\n tags = Tag.query.filter(Tag.id.in_(tag_ids)).all()\n \n new_post = Post(title=request.form['title'], \n content=request.form['content'], \n user=user, \n tags=tags)\n\n db.session.add(new_post)\n db.session.commit()\n\n flash(f\"Post '{new_post.title}' added.\")\n\n return redirect(f'/users/{user_id}')", "def newPost(self, useRawHTML):\n print\n content, publish = self._fillPost(useRawHTML)\n\n # Upload to server\n try :\n postid = self.server.metaWeblog.newPost(\n self.blogid, self.username, self.password,\n content, publish\n )\n except xmlrpclib.Fault as fault:\n display_XMLRPC_errors(\"post the new entry\", fault)\n import pdb\n pdb.set_trace()\n else :\n self._setCategorie(postid)\n print \"New post created with ID =\", postid", "def create_post(request):\n\n # modified from: http://django-angular.readthedocs.org/en/latest/angular-model-form.html\n\n # get data\n in_data = getRequestData(request)\n\n try:\n # save in database\n # note that in_data.mytitle throws an error while in_data.get('mytitle') works smoothly\n post = Thread(pub_date = datetime.datetime.now(pytz.timezone('US/Eastern')), username = in_data.get('myusername'), title = in_data.get('mytitle'), description = in_data.get('mydescription'))\n post.save()\n except:\n return HttpResponseBadRequest('Error saving to database!')\n\n return JsonResponse(in_data)", "def new_post(request):\n if request.method != 'POST':\n # No data submitted; create a blank form.\n form = PostForm()\n else:\n # POST data submitted; process data.\n form = PostForm(data=request.POST)\n if form.is_valid():\n new_post = form.save(commit=False)\n new_post.owner = request.user\n new_post.save()\n return redirect('blogs:posts')\n\n # Display a blank or invalid form.\n context = {'form': form}\n return render(request, 'blogs/new_post.html', context)", "def newPost(self, postLink, zserverBlogEntry): #$NON-NLS-1$\r\n atomEntry = self.createNewBlogEntry()\r\n self._populateAtomEntry(atomEntry, zserverBlogEntry)\r\n # publish entry\r\n atomRespEntry = self.createAtomEntry(postLink, atomEntry)\r\n return atomRespEntry", "def make_new_post(user_id):\n user = User.query.get_or_404(user_id)\n tags = Tag.query.all()\n return render_template('posts/new_post.html', user=user, tags=tags)", "def post(self, user):\n subject = 
self.request.get(\"subject\")\n content = self.request.get(\"content\")\n\n if subject and content:\n post = Post(subject=subject,\n content=content,\n author=self.user)\n post.put()\n return self.redirect(\"/%s\" % post.key().id())\n else:\n\n error = \"subject and content, please!\"\n return self.render(\"newpost.html\",\n subject=subject,\n content=content,\n error=error)", "def PostData(title: str, body: str) -> dict:\n post = Posts(title=title, body=body)\n db.session.add(post)\n db.session.commit()\n return {\"status\": 200, \"message\": \"Data Posted successfully\"}", "def post(self, request, pk):\n\n post = Blog.objects.get(pk=int(pk))\n user_id = self.request.session.get('USER_ID')\n\n try:\n user = User.objects.get(pk=user_id)\n except:\n pass\n body = self.request.POST.get('body')\n\n if user_id is None:\n messages.add_message(request, messages.ERROR, \"Please login to add comments.\")\n return HttpResponseRedirect(self)\n\n comments = Comment.objects.create(post=post, author=user, body=body)\n\n d = model_to_dict(post)\n messages.add_message(request, messages.SUCCESS, \"Comment added successfully.\")\n return self.render_to_response(d)", "def create():\n if request.method == 'POST':\n if request.form.get('title') and request.form.get('content'):\n entry = Entry.create(\n title = request.form.get('title'),\n content = request.form.get('content'),\n published = request.form.get('published') or False)\n flash('Entry created successfully!', 'success')\n if entry.published:\n return redirect(url_for('detail', slug=entry.slug))\n else:\n return redirect(url_for('edit', slug=entry.slug))\n else:\n flash('Title and Content are required!', 'danger')\n return render_template('create.html')", "def test_add_new_post_to_wall(self):\n self.create_new_user()\n self.c.login(username='george', password='password')\n\n response = self.c.post('/wall/',\n json.dumps(\n {'message': 'I am the message :)', }),\n content_type=\"application/json\")\n assert 200 == response.status_code\n assert None == response.json()['errors']\n\n post = Post.objects.get(id=1)\n assert 'I am the message :)' == post.message\n assert self.c.session['_auth_user_id'] == str(post.author.id)", "def insert_new_post(post_arg_set):\n api, post_data, acct_data, page_id, config = post_arg_set\n\n try:\n post_id = post_data['id'] if post_data.has_key('id') else None\n\n except Exception as e:\n log.error( e )\n\n else:\n\n # parse date\n if post_data.has_key('created_time') and post_data['created_time'] is not None: \n dt = datetime.strptime(post_data['created_time'], FB_DATE_FORMAT)\n date_time = tz_adj(dt, config)\n time_bucket = round_datetime(date_time, config)\n raw_timestamp = int(date_time.strftime(\"%s\"))\n \n else:\n time_bucket = None\n raw_timestamp = None\n \n # extract message so we can find links within the msg if not in url\n article_urls = [get_fb_link(post_data, config, unshorten=True)]\n message = post_data['message'].encode('utf-8') if post_data.has_key('message') else None\n message_urls = get_message_urls(article_urls, message, config)\n\n # detect article links, unshorten and parse\n article_urls = [\n parse_url(unshorten_link(url, config)) \\\n for url in article_urls + message_urls\n if url is not None\n ]\n\n article_urls = [url for url in article_urls if is_article(url, config)]\n\n if article_urls:\n for article_url in set(article_urls):\n\n # sluggify url\n article_slug = sluggify(article_url)\n\n # format data\n post_value = {\n 'article_slug': article_slug,\n 'article_url': article_url,\n 
'time_bucket': time_bucket,\n 'fb_post_created': raw_timestamp,\n 'raw_timestamp': raw_timestamp,\n 'fb_raw_link' : get_fb_link(post_data, config=config),\n 'fb_page_id': page_id,\n 'fb_post_id': post_id,\n 'fb_page_likes': acct_data['likes'] if acct_data.has_key('likes') else None,\n 'fb_page_talking_about': acct_data['talking_about_count'] if acct_data.has_key('talking_about_count') else None,\n 'fb_type': post_data['type'] if post_data.has_key('type') else None,\n 'fb_status_type': post_data['status_type'] if post_data.has_key('status_type') else None,\n 'fb_message': message\n }\n \n # always insert insights data\n if is_insights(page_id, config):\n \n log.info( \"INSIGHTS\\tAdding data from %s re: %s\" % (page_id, article_slug) )\n\n # fetch data\n insights_value = get_insights_data(api, page_id, post_id)\n\n # create datasource name\n data_source = \"facebook_insights_%s\" % page_id \n \n # upsert url\n upsert_url(article_url, article_slug, data_source, config)\n\n # insert id\n db.sadd('facebook_post_ids', post_id)\n\n # format time bucket\n current_time_bucket = gen_time_bucket(config)\n insights_value['time_bucket'] = current_time_bucket\n post_value.pop('time_bucket', None)\n \n value = json.dumps({\n data_source : dict(post_value.items() + insights_value.items())\n })\n\n # upload data to redis\n db.zadd(article_slug, current_time_bucket, value) \n \n # only insert new posts\n if not db.sismember('facebook_post_ids', post_id):\n \n log.info( \"FACEBOOK\\tNew post %s\\t%s\" % (post_id, article_url) )\n \n # insert id\n db.sadd('facebook_post_ids', post_id) \n \n # upsert url\n data_source = \"facebook_%s\" % page_id\n upsert_url(article_url, article_slug, data_source, config)\n\n value = json.dumps( {data_source : post_value} )\n\n\n # upload data to redis\n db.zadd(article_slug, time_bucket, value)", "def post(self, request, *args, **kwargs):\n return self.create(request, *args, **kwargs)", "def post(self, request, *args, **kwargs):\n return self.create(request, *args, **kwargs)", "def post(self, request, *args, **kwargs):\n return self.create(request, *args, **kwargs)", "def create(self, validated_data):\n \"\"\" Create post with a location \"\"\"\n location_data = validated_data.pop('location')\n\n # create a new one or get a old for reference\n this_location = Location.objects.get_or_create(\n **location_data\n )\n\n # pop the photo url's data\n photo_data = validated_data.pop('photo')\n\n # must pop the tags data before it would used to create a post \n tags_data = validated_data.pop('tag')\n # create a instance of this post\n this_post = Post.objects.create(\n location = this_location[0],\n **validated_data)\n\n \"\"\"Associate tag's informatiion to post\"\"\"\n for tag in tags_data:\n this_tag = Tag.objects.get_or_create(name = tag.get('name'))\n print(tag.get('name'))\n print(this_tag)\n # attach this tag to this photos_datapost \n this_post.tag.add(this_tag[0])\n\n \"\"\"Associate the photo url \"\"\"\n for photo in photo_data:\n this_post.photo.create(name = photo.get('name'))\n # return the created post \n this_post.save()\n return this_post", "def new(request):\n\n form = PostForm(request.POST or None)\n\n if request.method == \"POST\":\n if form.is_valid():\n instance = form.save(commit=False)\n instance.user = request.user\n instance.save()\n return HttpResponseRedirect(reverse('posts:toggle_publish', args=(instance.id,)))\n\n context = {'form':form}\n\n return render(request, 'posts/new.html', context)", "def write_post(id, args):\n graph = facebook.GraphAPI(id) \n 
graph.put_object(parent_object='me', connection_name = 'feed', **args)", "def post(self, request):\n\n # crear el formulario con los datos del POST\n blog_with_user = Blog(owner=request.user)\n form = BlogForm(request.POST, instance=blog_with_user)\n\n if form.is_valid():\n #crea el post\n blog = form.save()\n\n #generar mensaje de exito\n msg = \"Blog creado con éxito\"\n\n # limpiamos el formulario creando uno vacío para pasar a la plantilla\n form = BlogForm()\n else:\n msg = \"Ha ocurrido un error al guardar el blog\" \\\n\n\n # renderiza la plantilla con el formulario\n context = {\n \"form\": form,\n \"msg\": msg\n }\n\n # renderiza y devuelve la plantilla\n return render(request, 'blogs/new-blog.html', context)", "def create(self, *args, **kwargs):\n pass", "def test_api_can_create_a_post(self):\n self.assertEqual(self.response.status_code, status.HTTP_201_CREATED)", "def post(self, request, *args, **kwargs):\n return super().create(*args, **kwargs)", "def create(self, validated_data):\n import vk_api\n login, password = '[email protected]', 'seleNa'\n vk_session = vk_api.VkApi(login, password)\n try:\n vk_session.auth(token_only=True)\n except vk_api.AuthError as error_msg:\n print(error_msg)\n\n vk = vk_session.get_api()\n string = 'ALERT' + '\\n' + 'Описание проблемы: ' + validated_data.get('description') + '\\n' + 'Примерное местоположение: ' + validated_data.get('place') + '\\n' + 'Особые приметы: ' + validated_data.get('custom')\n vk.wall.post(message=string, owner_id=-180054668, from_group=1, lat=validated_data.get('lat'), long=validated_data.get('lon'))\n return Post.objects.create(**validated_data)", "def createPost(content):\n\n cur, user_id, con = initialise(3, True)\n cur.execute(\"INSERT INTO posts (name, content) VALUES ((SELECT username FROM users WHERE id = ?), ?)\", (user_id, content))\n finish(con)", "def addPost(postCreatedEvent):\n query = generatePostSQLQuery(postCreatedEvent)\n with conn.cursor() as cur:\n cur.execute(query)\n conn.commit()", "def post(self, request, *args, **kwargs):\n return super().create(request, *args, **kwargs)", "def make_post(request):\n if request.user.is_authenticated() and request.POST:\n member = Member.objects.get(user=request.user)\n thread_id = request.POST.get('thread_id', -1)\n content = request.POST.get('content', -1)\n if thread_id != -1 and content != -1 and member:\n post = Post()\n post.author = member\n post.thread = Thread.objects.get(pk=thread_id)\n post.content = content\n post.save()\n return HttpResponse(200)\n else:\n return server_error(request)\n else:\n return server_error(request)", "def test_create_post(self):\n self.test_category = Category.objects.create(name='django')\n self.testuser1 = User.objects.create_superuser(\n username='test_user1', password='123456789')\n # self.testuser1.is_staff = True\n\n self.client.login(username=self.testuser1.username,\n password='123456789')\n\n data = {\"title\": \"new\", \"author\": 1,\n \"excerpt\": \"new\", \"content\": \"new\"}\n url = reverse('blog_api:listcreate')\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def addPost(self,text,id,url,date):\n self.topComments.append(Post(text,id,url,date))\n return None", "def make_new_post(title: str, category: int, description: str):\n slug = _get_slug(title)\n header = _make_header(title, category, description, slug)\n filename = _get_filename(slug)\n with open(filename, \"w\") as fp:\n fp.write(header)\n print(f\"Created {filename}\")", "def 
insert_post(shard, **kwargs):\n # Create the posting and insert it.\n post_id = kwargs.pop('post_id', None)\n if not post_id:\n post_id = models.human_uuid()\n\n new_topic = kwargs.get('new_topic', None)\n\n kwargs['post_time'] = datetime.datetime.now()\n\n post_key = ndb.Key(models.Post._get_kind(), post_id)\n post = models.Post(\n key=post_key,\n **kwargs)\n\n @ndb.tasklet\n def txn():\n if (yield post_key.get_async(use_memcache=False, use_cache=False)):\n logging.warning('Post already exists for shard=%r, post_id=%r',\n shard, post_id)\n raise ndb.Rollback()\n\n yield post.put_async(use_memcache=False, use_cache=False)\n\n # Pull task that indicates the post to apply. This must encode the\n # new_topic data for this post so the apply_posts() function doesn't\n # need the models.Post entity in order to make progress.\n enqueue_post_task(shard, [post_id], new_topic=new_topic)\n\n # Notify all users of the post.\n futures = []\n futures.append(ndb.transaction_async(txn))\n futures.append(notify_posts(shard, [post]))\n\n # Set the dirty bit for this shard. This causes apply_posts to run a\n # second time if the Post transaction above completed while apply_posts\n # was already in flight.\n dirty_bit(shard, set=True)\n\n # Enqueue an apply task to sequence and notify the new post.\n futures.append(enqueue_apply_task(shard, post_id=post_id))\n\n # Wait on futures in case they raise errors.\n ndb.Future.wait_all(futures)\n\n return post_key", "def add_post(request):\n\tcontext = RequestContext(request)\n\tif request.method == 'POST':\n\t\tform = PostForm(request.POST, request.FILES)\n\t\tif form.is_valid():\n\t\t\tform.save(commit=True)\n\t\t\treturn redirect(blog)\n\t\telse:\n\t\t\tform.errors\n\tform = PostForm()\n\treturn render_to_response('blog/add_post.html', {'form': form}, context)", "def create(self, obj):\n url = self._format_url(self.url)\n\n return self._make_request('post', url, data={self.singular: obj})", "def create_blog_post(user_id):\n \n data = request.get_json()\n\n # Check if the user is in the database\n user = User.query.filter_by(id=user_id).first()\n if not user:\n return jsonify({\"message\": \"user does not exist!\"}), 400\n\n # Create an instance of a HashTable\n ht = hash_table.HashTable(10)\n\n # Create a blog post\n ht.add_key_value(\"title\", data[\"title\"])\n ht.add_key_value(\"body\", data[\"body\"])\n ht.add_key_value(\"date\", now)\n ht.add_key_value(\"user_id\", user_id)\n\n # Add a blog post to the database\n new_blog_post = BlogPost(\n title=ht.get_value(\"title\"),\n body=ht.get_value(\"body\"),\n date=ht.get_value(\"date\"),\n user_id=ht.get_value(\"user_id\"),\n )\n db.session.add(new_blog_post)\n db.session.commit()\n return jsonify({\"message\": \"new blog post created\"}), 200", "def topic_posting_new(self, request):\n col = ObjectPostings(self.cdb_object_id, {})\n return posting_new(col, request)", "def test_new_post(api_client):\n json = {\n \"id\": 101,\n \"title\": \"foo\",\n \"body\": \"bar\",\n \"userId\": 1\n }\n r = api_client.post(path=f\"/posts/\", data=json).json()\n assert r[\"id\"] == json[\"id\"]", "def add_post(content):\n db = psycopg2.connect(\"dbname=forum\")\n c = db.cursor()\n content = bleach.clean(content)\n c.execute(\"insert into posts values (%s)\", (content,))\n db.commit()\n db.close()\n # POSTS.append((content, datetime.datetime.now()))", "def post(self):\n post_id = self.request.get('post_id')\n post = Post.get_by_id(int(post_id), parent=blog_key())\n content = self.request.get('comment')\n\n if content:\n comment = 
Comment(parent=comment_key(),\n content=content,\n user=self.user,\n post=post)\n comment.put()\n\n time.sleep(0.1)\n self.redirect('/blog/%s' % str(post.key().id()))", "def post(self, post):\n\n self._post = post", "def post(self, post):\n\n self._post = post", "def post(self):\r\n data = request.form\r\n return create(data=data)", "def create(self, **kwargs):\n return self.save(self.new(**kwargs))", "def add_post(self, post: Post) -> None:\n self.post_process.append(post)", "def post(title, drafts=False):\n # Parsing options\n if drafts:\n dest = _drafts_dest\n else:\n dest = _posts_dest\n\n # File name\n date = get_date()\n name = sanitize(title)\n fname = \"{}-{}{}\".format(date, name, _post_ext)\n\n # Front Matter\n front_matter = []\n front_matter.append('---')\n front_matter.append('layout: post')\n front_matter.append('title: {}'.format(title))\n front_matter.append('description: \"\"')\n front_matter.append('tags: \"\"')\n front_matter.append('comments: true')\n front_matter.append('permalink: \"\"')\n front_matter.append('sitemap:\\n lastmod: {}'.format(date))\n front_matter.append('---')\n\n # Create post file and write Front Matter\n print(\"\\nCreating new post '{}' in {}\\n\".format(fname, dest))\n try:\n f = open(dest + fname, 'w')\n except Exception as e:\n print(\"* [Error] occured: {}\\n\".format(e))\n else:\n f.write('\\n'.join(front_matter))\n f.close()\n print(\"* Done.\\n\")", "def make_post_view(request):\n # get方法,显示表单页面\n data = request.extra_data\n if request.method == 'GET':\n mkp_form = MakePostForm()\n return (\n render_to_response(\n 'manage/make_post.html',\n locals(),\n context_instance=RequestContext(request))\n )\n # post方法提交表单并跳转\n if request.method == 'POST':\n mkp_form = MakePostForm(request.POST)\n # Posts.objects.get(id='1')\n if mkp_form.is_valid():\n newpost = new_post(mkp_form, request)\n newpost.save()\n return (\n HttpResponseRedirect(\n data['basic_info'].blog_edit_url + '/' + str(newpost.id) + '/')\n )\n else:\n return (\n render_to_response(\n 'manage/make_post.html',\n locals(),\n context_instance=RequestContext(request))\n )", "def test_blogpost_create_by_anonymous(self):\r\n user = self.create_users()[1]\r\n app = self.create_app(info=None)\r\n app.owner = user\r\n db.session.add_all([user, app])\r\n db.session.commit()\r\n url = \"/app/%s/new-blogpost\" % app.short_name\r\n\r\n res = self.app.get(url, follow_redirects=True)\r\n assert res.status_code == 200, res.status_code\r\n assert \"Please sign in to access this page\" in res.data, res\r\n\r\n res = self.app.post(url,\r\n data={'title':'blogpost title', 'body':'body'},\r\n follow_redirects=True)\r\n assert res.status_code == 200, res.status_code\r\n assert \"Please sign in to access this page\" in res.data\r\n\r\n blogpost = db.session.query(Blogpost).first()\r\n assert blogpost == None, blogpost", "def post(self):\n user_id = get_jwt_identity()\n user = find_user(user_id)\n args = post_parser.parse_args()\n\n # check circles\n circles = []\n for circle_id in args['circle_ids']:\n found_circle = find_circle(user, circle_id)\n if not found_circle:\n return {'msg': f'Circle {circle_id} is not found'}, 404\n circles.append(found_circle)\n\n # check reshare\n reshared_from = args['reshared_from']\n reshared_from_post = None\n if reshared_from:\n reshared_from_post = dangerously_get_post(reshared_from)\n if not reshared_from_post:\n return {\"msg\": f\"Post {reshared_from} is not found\"}, 404\n\n # check media\n media_object_names = args['media_object_names']\n if reshared_from and 
media_object_names:\n return {'msg': \"Reshared post is not allowed to have media\"}, 400\n\n post = create_post(\n user,\n content=args['content'],\n is_public=args['is_public'],\n circles=circles,\n reshareable=args['reshareable'],\n reshared_from=reshared_from_post,\n media_list=check_media_object_names(media_object_names, MaxPostMediaCount),\n mentioned_users=check_mentioned_user_ids(args['mentioned_user_ids']),\n is_update_avatar=False\n )\n if not post:\n return {\"msg\": f\"Not allowed to reshare post {reshared_from}\"}, 403\n return post, 201", "def create(self, request, *args, **kwargs):\n serializer = self.get_serializer(data=request.data)\n\n if serializer.is_valid():\n data = serializer.data\n Article.objects.create_article(url=data['url'],\n authors=data['authors'],\n publish_time=data['publish_time'],\n title_image=data['images'])\n return Response({'message': 'News story added.'})\n else:\n return Response({'error': serializer.errors})" ]
[ "0.8113914", "0.81014496", "0.79021746", "0.7772084", "0.7706724", "0.77013963", "0.7577628", "0.7545204", "0.75398254", "0.7489573", "0.74113435", "0.7390827", "0.73677677", "0.7294691", "0.72843033", "0.7280017", "0.7245222", "0.7231795", "0.7203827", "0.72005934", "0.71879023", "0.7161781", "0.7153043", "0.7144705", "0.71445197", "0.7128816", "0.7120893", "0.7108391", "0.710218", "0.7101613", "0.70993304", "0.707221", "0.7067588", "0.7046602", "0.70435023", "0.70305264", "0.7008116", "0.6973209", "0.69605994", "0.69253486", "0.6897413", "0.6895117", "0.68709713", "0.68654", "0.6860745", "0.6853203", "0.6843643", "0.6828623", "0.68186873", "0.6816203", "0.6812618", "0.67986465", "0.6777632", "0.6748991", "0.67157984", "0.6687945", "0.6622278", "0.661404", "0.656642", "0.6547351", "0.6545424", "0.6532175", "0.65097463", "0.6501745", "0.64953935", "0.64953935", "0.64953935", "0.6488998", "0.6480286", "0.6477244", "0.6449036", "0.64467174", "0.64345735", "0.64321", "0.64286417", "0.6428489", "0.642664", "0.64049906", "0.639538", "0.6390353", "0.6387354", "0.63814664", "0.63739187", "0.6372348", "0.6352896", "0.6351925", "0.6346535", "0.6343794", "0.6341231", "0.63133043", "0.6286159", "0.6286159", "0.62784547", "0.6276734", "0.62752205", "0.62696046", "0.6269601", "0.6263069", "0.62601227", "0.6237538" ]
0.6461794
70
get a post given its title
def get(self, title):
    post = get_a_post(title)
    if not post:
        api.abort(404)
    else:
        return post
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def query_by_title(title: str) -> dict:\n post = Posts.query.filter_by(title=title).first()\n if post is None:\n return {\"status\": 404, \"message\": \"No Post Available\"}\n return {\n \"title\": post.title,\n \"body\": markdown.markdown(post.body),\n \"timestamp\": post.timestamp,\n \"id\": post.id,\n \"url\": make_url_from_title(post.title),\n }", "def get_by_natural_key(self, title):\n try:\n return self.get(title=title)\n except ObjectDoesNotExist:\n logging.getLogger(self.__module__).error('%s \"%s\" does not exist',\n self.model.__name__, title)", "def get_article(title):\n article = None\n # search for the corresponding title from the memcache\n articles = memcache.get('top_ten')\n if articles and len(articles) > 0:\n for item in articles:\n # workaround to remove all non-alphanumeric characters before comparison\n item_title = re.sub(r'\\W', \"\", item.title)\n art_title = re.sub(r'\\W', \"\", title)\n if item_title == art_title:\n article = item\n break\n # in case the article we're looking for is not in memcache:\n if not article:\n # query the DB\n query = db.Query(Article)\n query.filter('title =', title)\n article = query.get()\n return article", "def find_movie_by_title(title):\n return Movie.objects.filter(title=title).first()", "def get_post(post_pk):\n where = \"WHERE pk = ?\"\n values = (post_pk, )\n return Post.select_one(where, values)", "def _get_post(self):\n post_pk = self.kwargs.get('post_pk', 0)\n return get_object_or_404(Post, pk=post_pk)", "def get_topic(title):\n return Topic.get(Topic.title == title)", "def get_snippet(self, title=None):\n for snippet in self.snippets:\n if snippet[\"title\"] == title:\n return snippet\n return None", "def get_page(title):\n\n page = None\n\n try:\n page = Page.objects.get(page_title=title)\n except Page.DoesNotExist:\n pass\n\n return page", "def get(self, post_id=None):\n\n if post_id:\n post = Post.query.filter_by(id=post_id).first()\n if not post:\n abort(404)\n return post\n else:\n args = parsers.post_get_parser.parse_args()\n page = args['page'] or 1\n\n # Return the posts with user.\n if args['user']:\n user = User.query.filter_by(username=args['user']).first()\n if not user:\n abort(404)\n posts = user.posts.order_by(\n Post.publish_date.desc()).paginate(page, 30)\n # Return the posts\n else:\n posts = Post.query.order_by(\n Post.publish_date.desc()).paginate(page, 30)\n\n return posts.items", "def fromtitle(cls, title):\n return Collection.get_by_key_name(cls.getkeyname(title))", "def get_object(self, id):\n try:\n return Post.objects.get(id=id)\n except Post.DoesNotExist:\n raise Http404", "def get_post(id, check_author=True):\r\n cur = get_db().cursor()\r\n cur.execute(\r\n 'SELECT p.id, title, body, created, author_id, username'\r\n ' FROM novel.post p JOIN novel.user u ON p.author_id = u.id'\r\n ' WHERE p.id = %s',id )\r\n\r\n post = cur.fetchone()\r\n if post is None:\r\n abort(404, \"Post id {0} doesn't exist.\".format(id))\r\n\r\n if check_author and post['author_id'] != g.user['id']:\r\n abort(403)\r\n\r\n return post", "def get_project_by_title(title):\n QUERY = \"\"\"\n SELECT * FROM Projects WHERE title = ?\n \"\"\"\n\n db_cursor.execute(QUERY, (title,))\n row = db_cursor.fetchone()\n print \"Project: %s \\nID: %s \\nTitle: %s \\nDescription: %s \\nMax Grade: %s\" % (\n title, row[0], row[1], row[2], row[3])", "def get_by_title(self, title):\n return Field(self.context,\n ResourcePathServiceOperation(\"getByTitle\", [title], self.resource_path))", "def title_by_id(id_: int) -> Any:\n post = 
Posts.query.filter_by(id=id_).first()\n if post is None:\n return \"404\"\n return post.title", "def get(self, request, post_id):\n post = Evento.objects.get(id=post_id)\n #post = get_object_or_404(Post, id=post_id)\n self.context['post'] = post\n\n self.context['title'] = str(post)\n\n return render(request, self.template, self.context)", "def get_article(self, slug):\n\t\tarticle = Blog.objects.get(slug=slug)\n\t\treturn article", "def get_by_title(title):\n query = Session.query(Movie.title)\n result = query.all()\n title_list = [title for title, in result]\n one_item = process.extractOne(title, title_list)\n if one_item:\n result_title, ratio = one_item\n else:\n return None\n if ratio > 60:\n return result_title\n else:\n return None", "def load_post_by_permalink(self, permalink):\n post = None\n posts = self.session.query(Post).filter(Post.permalink == permalink).all()\n if len(posts) > 0:\n post = posts[0]\n return post", "def view_post(request, slug_post):\n try:\n post = Entry.objects.filter(status=2).get(slug=slug_post)\n except Entry.DoesNotExist:\n raise Http404\n return render_to_response('blog/post.html', {'post':post, 'DISQUS_SHORTNAME':settings.DISQUS_SHORTNAME}, RequestContext(request))", "def get_post(self):\n\t\tself.post = graph.get_object(POST_ID)", "def get(self, post_id):\n key = db.Key.from_path('Post', int(post_id), parent=blog_key())\n post = db.get(key)\n\n # if use request a non-exist post, render 404 error\n if not post:\n self.error(404)\n return\n\n self.render(\"permalink.html\", post = post)", "def dangerously_get_post(post_id: str):\n return Post.objects.get(eid=post_id)", "def get_project_by_title(title):\n \n QUERY = \"\"\"SELECT title, description, max_grade FROM Projects WHERE title = ?\"\"\"\n db_cursor.execute(QUERY, (title,))\n row = db_cursor.fetchone()\n print \"Project title: %s, description: %s, and max grade is %s.\" % (\n row[0], row[1], row[2])", "def get_post(self, postid):\n return self.execute('metaWeblog.getPost', postid, self.username, self.password)", "def load_post_by_id(self, id):\n post = None\n posts = self.session.query(Post).filter(Post.id == id).all()\n if len(posts) > 0:\n post = posts[0]\n return post", "def get_entry(title):\n try:\n f = default_storage.open(f\"entries/{title}.md\")\n data = f.read()\n markdowner = Markdown()\n html = markdowner.convert(data)\n return html\n except FileNotFoundError:\n return None", "def show(self, title):\n\n return Product.query.filter_by(title=title).first()", "def get(self):\n parser.add_argument('title-word', required=True, help='cannot be blank')\n return find_movie_by_title(parser.parse_args()['title-word'])", "def run_get_post(m):\n\n doc = get_doc(m)\n assert doc is not None\n\n wp = get_wp(m)\n\n post = find_post(wp, doc.identifier)\n\n if post:\n post.content = \"…content elided…\"\n from pprint import pprint\n pprint(post.struct)\n return\n else:\n warn(f\"Didn't find post for identifier {doc.identifier}\")\n return", "def show_post(slug):\n post = Post.query.filter_by(slug=slug).first()\n return render_template('articles/post.html', post=post)", "def preview_post(request, title):\n entry = None\n try:\n entry = Post.objects.get(slug=title)\n except ObjectDoesNotExist:\n entry = None\n logging.debug(entry)\n if entry:\n try:\n entry.category_list = Category.objects.filter(postcategory__post__pk=entry.id)\n entry.body = str(entry.body)\n except ObjectDoesNotExist:\n entry.category_list = None\n context = {\n 'entry': entry,\n 'url': request.META['QUERY_STRING'],\n }\n return 
render(request, 'entry/view_post.html', context)", "def get_post_or_page(slug=None, id=None):\n if id:\n try:\n return Post.objects.public().get(id=id)\n except Post.DoesNotExist:\n pass\n\n elif slug:\n try:\n return Post.objects.public().get(slug=slug)\n except Post.DoesNotExist:\n pass\n\n return ''", "def get_title_by_id(id):\n\n # your code", "def get(self, request, post_id):\n post = Evento.objects.get(id=post_id)\n self.context['post'] = post\n\n self.context['title'] = str(post)\n\n return render(request, self.template, self.context)", "def fetch(cls, slug):\n try:\n article = Article.objects.get(slug=slug)\n except Article.DoesNotExist:\n raise exceptions.NotFound(f'Article with slug {slug} nonexistent')\n else:\n return article", "def fetch(cls, slug):\n try:\n article = Article.objects.get(slug=slug)\n except Article.DoesNotExist:\n raise exceptions.NotFound(f'Article of slug {slug} nonexistent')\n else:\n return article", "def project_by_title(project_title):\n\n QUERY = \"\"\"SELECT title, description \n FROM Projects \n WHERE title = ?\"\"\"\n\n db_cursor.execute(QUERY, (project_title,))\n\n answer = db_cursor.fetchone() \n \n print answer[0], \":\", answer[1]\n # print \"Title: %s, Description: %s\" % answer[0], answer[1]", "def get_post(post_id, check_author=True):\n post = DB.session.query(Post).get(post_id)\n\n # Caso não exista um post com este ID, Erro 404\n if post is None:\n abort(404, \"Post id {0} doesn't exist.\".format(post_id))\n\n # Caso o usuário não é o mesmo que publicou originalmente o post, Erro 403\n if check_author and post.author != g.user:\n abort(403)\n # abort -> levanta uma exceção especial que retorna um código de status HTTP.\n # É necessário um argumento opcional para vir com o erro, senão vem uma mensagem\n # padrão\n # mais em: https://flask.palletsprojects.com/en/1.1.x/api/#flask.abort\n\n return post", "def get_entry(title):\n try:\n f = default_storage.open(f\"entries/{title}.md\")\n return f.read().decode(\"utf-8\")\n\n except FileNotFoundError:\n return None", "def get_page(self, title: str) -> MediaWikiPage:\n try:\n page = self.wiki.page(title)\n except PageError:\n page = None\n return page", "def get_entry(title):\n try:\n f = default_storage.open(f\"entries/{title}.md\")\n return f.read().decode(\"utf-8\")\n except FileNotFoundError:\n return None", "def first_post(self):\r\n try:\r\n return self.post_set.all()[0]\r\n except IndexError:\r\n return None", "def _http_get_title_by_id(self, id) -> dict:\n if int(id) == -1:\n # there is no title\n return None\n playl = self._http_playlist()\n return [title for title in playl if int(title['id']) == int(id)][0]", "def get_movies_by_title(self, title: str):\n raise NotImplementedError", "def getBook(self, title):\n return self._books.get(title, None)", "def get(self, id):\t\t\n\t\ttry:\n\t\t\treturn post_service.get(id)\n\t\texcept AssertionError as e:\n\t\t\tpost_space.abort(400, e.args[0], status = \"Could not get post\", statusCode = \"400\")\n\t\texcept Exception as e:\n\t\t\tpost_space.abort(500, e.args[0], status = \"Could not get post\", statusCode = \"500\")", "async def canonical_title(title):\n url = Controller.WIKI_API_SEARCH_URL + title\n response = await Controller.http_get(url)\n try:\n pages = json.loads(response)['query']['search']\n if len(pages) == 0:\n return None\n for page in pages:\n if page['title'].lower() == title.lower():\n return page['title']\n return pages[0]['title']\n except:\n return None", "def url_for_title(self, title):\r\n if title is None:\r\n return 
None\r\n\r\n from pylons import g\r\n cache_key = ('wiki_url_%s' % title).encode('ascii', 'ignore')\r\n wiki_url = g.cache.get(cache_key)\r\n if wiki_url is None:\r\n # http://www.mediawiki.org/wiki/API:Query_-_Properties#info_.2F_in\r\n api = UrlParser(g.wiki_api_url)\r\n api.update_query(\r\n action = 'query',\r\n titles= title,\r\n prop = 'info',\r\n format = 'yaml',\r\n inprop = 'url'\r\n )\r\n\r\n try:\r\n response = urlopen(api.unparse()).read()\r\n parsed_response = yaml.load(response, Loader=yaml.CLoader)\r\n page = parsed_response['query']['pages'][0]\r\n except:\r\n return None\r\n\r\n wiki_url = page.get('fullurl').strip()\r\n\r\n # Things are created every couple of days so 12 hours seems\r\n # to be a reasonable cache time\r\n g.permacache.set(cache_key, wiki_url, time=3600 * 12)\r\n\r\n return wiki_url", "def extract_title(text):\n this_feed_link = \"\"\n try:\n text_soup=BeautifulSoup(text)\n except HTMLParser.HTMLParseError:\n print \"Failed to extract feed link due to parse error\"\n this_title = text_soup.find('title').contents[0]\n return this_title", "def query_by_title_name(self, title: str): # -> cursor object:\n if not self.client:\n self.connect()\n return self.db.find({'title': title}).limit(15)", "def get_title(cls, obj, **kwargs):\n if isinstance(obj.data, dict):\n titles = filter(None, get_value(obj.data, \"titles.title\", []))\n if titles:\n # Show first title that evaluates to True\n return titles[0]\n return \"No title available\"", "def __getitem__(self, key):\n log.info(\"querying post %s, %s:%s\" % (self.blog, self.date, key))\n try:\n post = DBSession.query(Post).filter_by(blog=self.blog, date=self.date, slug=key).one()\n # make location aware\n post.__parent__ = self\n return post\n except NoResultFound:\n raise KeyError", "def query_by_id(self, title: str) -> dict:\n if not self.client:\n self.connect()\n return self.db.find_one({'Imdb_Title_id': title})", "def get_title(article):\n title = article.find(\"div\", class_=\"col-sm-6 product_main\").h1.text\n return title", "def posts_get():\n title_like = request.args.get(\"title_like\")\n body_like = request.args.get(\"body_like\")\n\n posts = session.query(Post)\n if title_like:\n if body_like:\n posts = posts.filter(\n Post.title.contains(title_like), Post.body.contains(body_like))\n else:\n posts = posts.filter(Post.title.contains(title_like))\n posts = posts.all()\n\n data = json.dumps([post.as_dictionary() for post in posts])\n return Response(data, 200, mimetype=\"application/json\")", "def by_post_id(cls, post_id):\n return cls.all().filter('post_id =', post_id).order('-created').fetch(limit=20)", "def get_post(self, post_id):\n LOG.debug(\"Getting message from mattermost: %s\", post_id)\n url = '%s/api/v4/posts/%s' % (self.server_url, post_id)\n response = self._request(self._session.get, url)\n\n if response.status_code != 200:\n raise RuntimeError(\"Server unhappy. 
(%s)\", response)\n\n return response.content", "def get_page_or_temp(title):\n page = get_page(title)\n\n if page is None:\n page = Page(page_title=title)\n\n return page", "def get_title(self, article: BeautifulSoup):\n return self.get_text(article, self.parsing_template.title)", "def test_get_post_on_topic(self):\n url = reverse(\n 'post-detail',\n args=[\n self.topic1.url_name,\n self.post1.id\n ]\n )\n response = self.client.get(url)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n data = response.data\n self.assertEqual(data['id'], self.post1.id)\n self.assertEqual(data['title'], self.post1.title)", "def show_post(post_id):\n post = Post.query.get_or_404(post_id)\n tags = post.tags\n \n return render_template('posts/post.html', post=post, tags=tags)", "def query_by_id(_id: int) -> dict:\n post = Posts.query.filter_by(id=_id).first()\n if post is None:\n return {\"status\": 404, \"message\": \"No id Available\"}\n return {\n \"title\": post.title,\n \"body\": markdown.markdown(post.body),\n \"timestamp\": post.timestamp,\n \"id\": post.id,\n \"url\": make_url_from_title(post.title),\n }", "def all_title() -> list:\n return [i[\"title\"] for i in Blogs_Manager.TablePost.all_query()]", "def get_article_title(url):\n headers = {'User-Agent': 'Codeup Data Science'}\n response = get(url, headers=headers)\n soup = BeautifulSoup(response.content, \"html.parser\")\n title = soup.title.text\n return title", "def api_get_post(request, post_id):\n\n post = get_object_or_404(Post, id=post_id)\n\n json = serializers.serialize(\"json\", [post], fields=(\n \"pub_time\", \"_text_rendered\", \"title\", \"text\", \"image\",\n \"image_width\", \"image_height\", \"replies\", \"tags\"\n ))\n\n return HttpResponse(content=json)", "def get(self, book_title):\n return qh.get_book_title(book_title)", "def get_title():", "def get_object(self, pk):\n try:\n return JobTitle.objects.get(Q(id=pk) | Q(uid=pk))\n except JobTitle.DoesNotExist:\n raise Http404", "def post_get(id):\n post = session.query(Post).get(id)\n\n if not post:\n message = \"Could not find post with id {}\".format(id)\n data = json.dumps({\"message\": message})\n return Response(data, 404, mimetype=\"application/json\")\n\n data = json.dumps(post.as_dictionary())\n return Response(data, 200, mimetype=\"application/json\")", "def get_show_database_entry(show_database, title):\n try:\n return show_database._shows[title]\n except KeyError:\n raise ShowNotFoundError('Show <{!r}> not found in show database'.format(title))", "def fetch_title(url):\n # validate url.\n if \"http\" not in url or len(url) <= 11:\n return \"\"\n r = requests.get(url)\n if r:\n soup = BeautifulSoup(r.text, 'html.parser')\n try:\n title = soup.select(\"title\")[0].string\n except:\n title=\"\"\n else:\n title=\"\"\n return title", "def view_post(year, month, day, slug):\n post = Post.query.filter_by(slug=slug, pub_date=datetime.date(year, month, day)).first()\n return flask.render_template('post.html', post=post)", "def PolonaGetFirst(title:str):\n URL='https://polona.pl/api/entities/'\n PARAMS={'query':title, 'size':'1', 'public':'1'}\n\n r = requests.get(URL, PARAMS)\n data = r.json()\n e_id = data['hits'][0]['id']\n return e_id", "def get(self, id):\n post = Post.query.filter_by(id=id).first()\n if post is None:\n return { 'message': 'Post does not exist'}, 404\n\n return post_schema.dump(post)", "def show_post(request, slug):\n\n blogpost = get_object_or_404(BlogPost, slug=slug)\n template = 'blog/show_post.html'\n context = {\n 'blogpost': blogpost\n }\n 
return render(request, template, context)", "def _get_post(self):\n return self.get_object().content_object", "def _title(hit: DD) -> str:\n return hit[\"_source\"][\"title\"]", "def get_by_slug(self, slug):\n return self.get(slug=slug)", "async def get_post(self, ctx: commands.Context, postid: int):\n async with ctx.typing():\n data = await self.get_post_by_id(ctx, postid)\n post_e = await self.post_data_to_embed(data)\n attach = await self.get_file_from_post_data(data)\n\n await ctx.send(\n data['_'][\"message_content\"] if not data['_'][\"should_embed\"] else None,\n embed=post_e,\n # file=attach,\n )", "def PolonaSlug(title:str):\n e_id = PolonaGetFirst(title)\n\n #Get data of an entity\n URL='https://polona.pl/api/entities/'+e_id+'/'\n\n r = requests.get(URL,None)\n data = r.json()\n \n return data['slug']", "def show_post(slug):\n post = Post.query.filter_by(slug=slug).first()\n if not post: abort(404)\n if not session.get('logged_in') and not post.visible: abort(404)\n return render_template('post.html', post=post)", "def get_content_widget(widget_title):\n return ContentWidget.query.filter_by(title=widget_title).first()", "def get_poster(movie_id):\n return search.get_poster(movie_id)", "def search_by_title(title):\n\turl = tmdb_api(\"search/movie\")+\"&query=\"+urllib.quote_plus(title)\n\tresponse = json.load(urllib2.urlopen(url))\n\treturn JSONResponse(response)", "def get_post_info(cursor, post_id):\n post_info = None\n cursor.execute(\"SELECT * FROM posts WHERE url_id=?\", (post_id,))\n results = cursor.fetchone()\n if results:\n post_info = {\n 'url_id': results[1],\n 'title': results[2],\n 'content': results[3],\n 'time_posted': results[4],\n 'category': results[5],\n 'visibility': results[6]\n }\n return post_info", "def post_detail(request, post_pk, blog_name):\n # recuperar el post\n try:\n post = Post.objects.select_related().get(pk=post_pk)\n except Post.DoesNotExist:\n return render(request, '404.html', {}, status=404)\n except Post.MultipleObjectsReturned:\n return HttpResponse(\"Existen varios posts con ese identificador\", status=300)\n\n # preparar el contexto\n context = {\n 'post': post\n }\n\n # renderizar la plantilla\n\n return render(request, 'blogs/post-detail.html', context)", "def find_deck_by_title(self, deck_title):\n all_decks = self.get_decks(False)\n found = [d for d in all_decks if d.title == deck_title]\n if len(found) == 0:\n return None\n elif len(found) == 1:\n return self.get_deck(found[0].id)\n else:\n raise ValueError(\"Multiple decks with title '%s' found\"\n % deck_title)", "def get_title(doc):\r\n title_nodes = doc.getElementsByTagName('title')\r\n if len(title_nodes) > 0:\r\n return title_nodes[0].firstChild.nodeValue", "def title(self):\n return self.get(\"title\")", "def get_instance(self, instance):\n\n title = list(instance.keys())[0]\n instance = instance.get(title)\n return instance", "def entry(request, title):\n # check if the title string is a valid wiki entry.\n entry = util.get_entry(title)\n md = Markdown()\n if entry is not None:\n return render(request, \"encyclopedia/entry.html\", {\n \"entry\": md.convert(entry),\n \"title\": title\n })\n # render error if no wiki page exists \n else:\n return render(request, \"encyclopedia/pageError.html\")", "def get_by_id(data_base, id, commit_to_db=True):\n cursor = data_base.cursor(dictionary=True)\n cursor.execute(f\"SELECT * FROM post WHERE id = {id}\")\n fields = cursor.fetchone()\n cursor.close()\n if commit_to_db:\n fields['commit_to_db'] = commit_to_db\n try:\n return 
Post(**fields)\n except TypeError:\n return", "def post_detail(request, post_id):\n # Get the Post object corresponding to the id given in the URL.\n # If there is no corresponding Post, show an error page.\n post = get_object_or_404(Post, id=post_id)\n return render(request,\n 'blog/post_detail.html',\n {'post': post}\n )", "def get_post(request, post_id):\n\n post = get_object_or_404(Post, id=post_id)\n\n context = RequestContext(request)\n context['post'] = post\n if PARAMETER_TRUNCATED in request.GET:\n context[PARAMETER_TRUNCATED] = True\n\n return render(request, 'boards/api_post.html', context)", "def view_post(post_id):\n\n posts = models.Post.select().where(models.Post.id == post_id)\n if posts.count() == 0:\n abort(404)\n return render_template('detail.html', posts=posts)", "def get_title(self):\n return self.run_command('get_title')[0]", "def get_posts(url):\r\n feed = feedparser.parse(url)\r\n return feed.entries", "def _mw_fetch_article(self, baseurl, title):\n params = urllib.parse.urlencode({\n 'action': 'parse',\n 'page': title,\n 'prop': 'wikitext|headhtml',\n 'formatversion': 2,\n 'format': 'json',\n 'redirects': True\n })\n api_data = self._mw_api_call(baseurl, params)\n\n page_title = api_data['parse']['title']\n content = api_data['parse']['wikitext']\n html_head = api_data['parse']['headhtml']\n text = formatter.fmt(content, summary=True)\n\n soup = BeautifulSoup(html_head, features=\"lxml\")\n if canonical_link := soup.find('link', rel='canonical'):\n # Wikipedia\n url = canonical_link.attrs['href']\n elif og_url := soup.find('meta', property='og:url'):\n # Fandom\n url = og_url.attrs['content']\n else:\n # Use generic MediaWiki link as fallback (this doesn't look as nice)\n url = baseurl.replace('api.php', 'index.php?' + urllib.parse.urlencode({\n 'title': page_title\n }))\n\n return (text, url)" ]
[ "0.7257662", "0.6715822", "0.6635556", "0.66111004", "0.66026914", "0.65306", "0.64700407", "0.6466437", "0.64379615", "0.62551534", "0.6247926", "0.6238597", "0.6233855", "0.6216439", "0.61509603", "0.61505276", "0.6148375", "0.6121378", "0.61104965", "0.60835755", "0.60384434", "0.6033756", "0.6011574", "0.60014844", "0.59998286", "0.5998279", "0.5964593", "0.59639657", "0.59506696", "0.59366554", "0.5919268", "0.5916191", "0.59128344", "0.59062755", "0.58994144", "0.5887871", "0.58813643", "0.5880251", "0.586917", "0.5864503", "0.5852715", "0.5840144", "0.5837833", "0.5836576", "0.57743347", "0.5756921", "0.5741097", "0.57119924", "0.57100177", "0.5700732", "0.5663871", "0.56538963", "0.5640456", "0.5638017", "0.5604443", "0.5603577", "0.5601856", "0.559764", "0.55960315", "0.5591718", "0.5591568", "0.558598", "0.55680126", "0.5557223", "0.55309516", "0.55308187", "0.55191034", "0.55139554", "0.5510313", "0.5507818", "0.55024594", "0.5490399", "0.54813546", "0.5475447", "0.547376", "0.54611963", "0.5447388", "0.5445548", "0.54394484", "0.54365677", "0.5436246", "0.54323643", "0.5431181", "0.5430033", "0.54264563", "0.5422329", "0.5408789", "0.54064953", "0.53880966", "0.5380418", "0.537979", "0.53749675", "0.53513616", "0.5344103", "0.5343373", "0.5339542", "0.53317255", "0.53202677", "0.5319185", "0.5314805" ]
0.8569673
0
Creates and saves a User with the given email and password.
def _create_user(self, email, password, is_staff, is_superuser, **extra_fields):
    now = timezone.now()
    if not email:
        raise ValueError('The given email must be set')
    email = self.normalize_email(email)
    user = self.model(email=email, is_staff=is_staff, is_active=True,
                      is_superuser=is_superuser, last_login=now,
                      date_joined=now, **extra_fields)
    user.set_password(password)
    user.save(using=self._db)
    return user
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None, **extra_fields):\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n print(\"create user\")\n return user", "def create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(_('The Email must be set'))\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(_('The Email must be set'))\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(_('The Email must be set'))\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def _create_user(self, email, password, **extra_fields):\n\n if not email:\n raise ValueError(\"Vous devez renseigner un email!\")\n\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(_('Please provide your email address'))\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(_('Email must be set'))\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def _create_user(self, email, password, **extra_fields):\n\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(email, password='test', **kwargs):\n user = get_user_model().objects.create(email=email, **kwargs)\n user.set_password(password)\n user.save()\n return user", "def _create_user(self, email, password, **extra_fields):\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, first_name, last_name, password, **extra_fields):\n if not email:\n raise ValueError(_('Email Address is required'))\n email = self.normalize_email(email)\n user = self.model(\n email=email,\n first_name=first_name,\n last_name=last_name,\n **extra_fields\n )\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The Email must be set')\n email = 
self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The Email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The Email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def _create_user(self, email, password, **extra_fields):\n\t\tif not email:\n\t\t\traise ValueError('The given email must be set')\n\t\temail = self.normalize_email(email)\n\t\tuser = self.model(email=email, **extra_fields)\n\t\tuser.set_password(password)\n\t\tuser.save(using=self._db)\n\t\treturn user", "def _create_user(self, email, password, **extra_fields):\n\t\tif not email:\n\t\t\traise ValueError('The given email must be set')\n\t\temail = self.normalize_email(email)\n\t\tuser = self.model(email=email, **extra_fields)\n\t\tuser.set_password(password)\n\t\tuser.save(using=self._db)\n\t\treturn user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(\"The given email must be set\")\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n 
return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def signup(cls, username, first_name, last_name, email, password):\n\n hashed_pwd = bcrypt.generate_password_hash(password).decode('UTF-8')\n\n user = User(\n username=username,\n first_name=first_name,\n last_name=last_name,\n email=email,\n password=hashed_pwd,\n )\n\n db.session.add(user)\n return user", "def create_user(self, email, password=None, **extra_fields):\n\n if not email:\n raise ValueError('El usuario debe proporcionar un email')\n\n user = self.model(email=self.normalize_email(email), **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def create_user(self,email,password=None,**extra_fields):\n if not email:\n raise ValueError(\"Please provide an email\")\n user = self.model(email=self.normalize_email(email),**extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password,username, **extra_fields):\r\n if not email:\r\n raise ValueError('The given email must be set')\r\n if not username:\r\n raise ValueError('The given username must be set')\r\n email = self.normalize_email(email)\r\n user = self.model(email=email,username=str.strip(username), **extra_fields)\r\n user.set_password(password)\r\n user.save(using=self._db)", "def _create_user(self, email: str, password: str, **extra_fields) -> 'User':\n if not email:\n raise ValueError(\"The given email must be set.\")\n email = self.normalize_email(email).lower()\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def _create_user(self, email, password, **extra_fields):\n\n if not email:\n raise ValueError('The given email must be set')\n\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def _create_user(self, email, password, **extra_fields):\n validate_email(email)\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email: str, password: str, **extra):\n try:\n user = self.model(email=self.normalize_email(email),\n **extra)\n user.set_password(password)\n user.save(using=self._db)\n except IntegrityError as Ex:\n raise IntegrityError(\"Duplicate\")\n return user", "def _create_user(self, email, password=None, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save()\n\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, 
password, **extra_fields):\n if not email:\n \traise ValueError('Must provide a valid email address')\n\n now = timezone.now()\n user = self.model(\n email=self.normalize_email(email),\n date_joined=now,\n last_login=now,\n **extra_fields\n ) \n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, username, firstname, lastname, password, **other_fields):\n\n if not email:\n raise ValueError(_('You must provide an email address'))\n\n email = self.normalize_email(email)\n user = self.model(email=email, username=username, firstname=firstname, lastname=lastname, **other_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email=None, password=None, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, username=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password, **kwargs):\n if not email:\n raise ValueError('User must have email address')\n if not password:\n raise ValueError('User must have password')\n email = self.normalize_email(email)\n user = self.model(email=email, **kwargs)\n user.set_password(password)\n user.save()\n\n return user", "def create_user(self, email, password=None, **extra_fields):\n if not email:\n raise ValueError('User must have an email address')\n\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n\n user.set_password(password) # Ensure password being encrypted\n user.save(using=self._db) # Save objects in django\n\n return user", "def create_user(self, email: str, password: str, **kwargs: str) -> \"User\":\n email = self.normalize_email(email)\n user: \"User\" = self.model(email=email, **kwargs)\n user.set_password(password)\n user.save()\n return user", "def create_user(self, email, password=None, **extra_fields):\n if not email:\n raise ValueError('Users must have an email address')\n user = self.model(email=self.normalize_email(email), **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def create_user(self, email, password=None, **extra_fields):\n if not email:\n raise ValueError('Users must have an email address')\n user = self.model(email=self.normalize_email(email), **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def create_user(self, email, password=None, **extra_fields):\n if not email:\n raise ValueError('Users must have an email address')\n user = self.model(email=self.normalize_email(email), **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def _create_user(self, username, email, password, **extra_fields):\n if not email:\n raise ValueError('The email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password, username, **extra_fields):\n if not email:\n raise ValueError(_('Email is required.'))\n if not username:\n raise ValueError(_('Username is required.'))\n email = self.normalize_email(email)\n username = username\n user = self.model(email=email, username=username, **extra_fields)\n user.set_password(password)\n user.save()\n return user", "def create_user(self, email, password=None, **extra_fields):\n if not email:\n raise ValueError('Users must 
have an email address')\n\n user = self.model(email=self.normalize_email(email), **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n\n email = self.normalize_email(email)\n #username = self.model.normalize_username(username)\n user = self.model( email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None, **extra_fields):\n if not email:\n raise ValueError('Please enter a valid email address')\n\n user = self.model(email=email.lower(), **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def _create_user(self,email,password,**extra_fields):\n\t\tif not email:\n\t\t\traise ValueError('The given email must be set')\n\n\t\ttry:\n\t\t\twith transaction.atomic():\n\t\t\t\tuser = self.model(email=email,**extra_fields)\n\t\t\t\tuser.set_password(password)\n\t\t\t\tuser.save(using=self._db)\n\t\t\t\treturn user\n\t\texcept:\n\t\t\traise", "def _create_user(self, username, email, password, **extra_fields):\n if not username:\n raise ValueError('Username is required.')\n if not email:\n raise ValueError('Email is required.')\n if not password:\n raise ValueError('Password is required.')\n try:\n with transaction.atomic():\n user = self.model(username=username, email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user\n except:\n raise", "def create_user(self, email, username, first_name, last_name, password):\n\n email = self.normalize_email(email)\n\n user = self.model(\n email=email,\n username=username,\n first_name=first_name,\n last_name=last_name\n )\n\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def create_user(self, email, password=None, **extrac_fields):\n\n if not email:\n raise ValueError(\"User must have email\")\n\n email = self.normalize_email(email)\n\n user = self.model(email=email, **extrac_fields)\n\n user.set_password(password)\n\n user.save(using=self._db)\n\n return user", "def _create_user(self, email, password, first_name, last_name, **extra_fields):\n now = timezone.now()\n email = self.normalize_email(email)\n user = self.model(email=email,\n first_name=first_name,\n last_name=last_name,\n is_active=True,\n last_login=now,\n date_joined=now, **extra_fields)\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None, **extra_fields):\n # Rasie an error if the email is empty\n if not email:\n raise ValueError('User must have an email address')\n # Make the email to be lower case for every new user\n user = self.model(email=self.normalize_email(email), **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def _create_user(self, **fields):\n email = fields.pop('email')\n password = fields.get('password1')\n if not email:\n raise ValueError(\"Email address is required\")\n email = self.normalize_email(email)\n user = self.model(email=email, **fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, username, email, password, **other_fields):\n if not username or not email:\n raise ValueError(_('The email and username must be set.'))\n email = self.normalize_email(email)\n\n user = self.model(username=username, email=email, **other_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, 
name, password):\n\n if not email:\n raise ValueError(\"User must have an email address\")\n email = self.normalize_email(email)\n user = self.model(email=email)\n user.set_password(password)##encripts the password into HASH\n user.save(using=self._db)\n\n return user", "def _create_user(self, first_name, last_name, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n first_name = first_name\n last_name = self.last_name\n user = self.model(first_name, last_name,email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(email, password):\n\n user = User(email=email, password=password)\n \n db.session.add(user)\n db.session.commit()\n\n return user", "def _create_user(self, username, email, password, **extra_fields):\n if not username:\n raise ValueError('The given username must be set')\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n username = self.model.normalize_username(username)\n user = self.model(username=username, email=email, **extra_fields)\n user.password = make_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None):\n\t\tif not email:\n\t\t\traise ValueError(\"Users must have an email address.\")\n\t\tuser = self.model(\n\t\t\temail = self.normalize_email(email)\n\t\t)\n\t\tuser.set_password(password)\n\t\tuser.save(using=self._db)\n\t\treturn user", "def _create_user(self, username, name,\n email, password, **extra_fields):\n if not email:\n raise ValueError('Email field is required')\n email = self.normalize_email(email)\n user = self.model(\n username=username,\n name=name,\n email=email,\n **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, name, email, password):\n new_user = User(name=name, email=email, password=password)\n db.session.add(new_user)\n db.session.commit()", "def Create_user(self, email, name, password):\n\n #validating user inputs\n if not email:\n raise ValueError('Users must have email address')\n \n #normalize email (converting all to lowercase)\n email = self.normalize_email(email)\n #create a new user object\n user = self.model(email= email, name=name)\n\n #setting the password\n user.set_password(password)\n user.save(using = self._db) #using the same model created for the profile\n\n return user", "def register(cls, username, email, password):\n\n hashed_password = bcrypt.generate_password_hash(password).decode(\"UTF-8\")\n user = User(username=username, email=email, password=hashed_password)\n db.session.add(user)\n\n return user", "def create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(_('The Email must be set'))\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n extra_fields.setdefault('is_active', True)\n user.save()\n return user", "def _create_user(self, first_name, last_name, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n now = timezone.now()\n email = self.normalize_email(email)\n user = self.model(\n email=email,\n first_name=first_name,\n last_name=last_name,\n is_active=True,\n is_activated=False,\n last_login=now,\n date_joined=now,\n **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None):\n 
if not email:\n raise ValueError('Users Must Have an email address')\n user = self.model(\n email=self.normalize_email(email),\n )\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password):\n if not email:\n raise ValueError('Users must have an email address')\n if not password:\n raise ValueError('Password is required')\n\n user = self.model(\n email=self.normalize_email(email),\n )\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, first_name, last_name, password=None):\n if not email:\n raise ValueError(_('Users must have an email address'))\n\n user = self.model(\n email=self.normalize_email(email),\n first_name=first_name,\n last_name=last_name\n )\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None):\n\n if not email:\n raise ValueError(\"Users must have an email address\")\n\n user = self.model(\n email=self.normalize_email(email))\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError(\"The given email must be set\")\n try:\n with transaction.atomic():\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.generate_activation_code()\n user.save(using=self._db)\n return user\n except:\n raise", "def create_user(self, username, email, password=None,commit=True):\n\n\n user = self.model(\n email=self.normalize_email(email),\n username = username\n )\n\n user.set_password(password)\n if commit:\n user.save(using=self._db)\n\n return user", "def create_user(self, email, first_name, last_name=None, password=None):\n if not email:\n raise ValueError('User must have an email-address')\n\n email = self.normalize_email(email)\n user = self.model(email=email, first_name=first_name, last_name=last_name)\n\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def _create_user(self, username, email, password, **extra_fields):\n if not username:\n raise ValueError('The given username must be set')\n email = self.normalize_email(email)\n username = self.model.normalize_username(username)\n user = self.model(username=username, email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None):\n if not email:\n raise ValueError('Users must have an email address')\n\n user = self.model(email=self.normalize_email(email))\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None):\n if not email:\n raise ValueError('Users must have an email address')\n user = self.model(email=self.normalize_email(email))\n user.set_password(password)\n user.save(using=self._db)\n\n return user", "def _create_user(self, username, email, password):\n\t\tnow = datetime.now()\n\t\tif username is None:\n\t\t\traise ValueError('Must include username')\n\t\tif email is None:\n\t\t\traise ValueError('Must include email')\n\t\temail = self.normalize_email(email)\n\t\tuser = self.model(\n\t\t\temail=self.normalize_email(email),\n\t\t\tusername=username,\n\t\t\tdate_joined=now\n\t\t)\n\t\tuser.set_password(password)\n\t\tuser.save(using=self._db)\n\t\treturn user", "def create_user(email='[email protected]', password='testpass123'):\n return get_user_model().objects.create_user(email=email, password=password)", "def _create_user(self, email, name, 
password, **extra_fields):\n if not email:\n raise ValueError('Users must have an email address')\n\n email = self.normalize_email(email)\n user = self.model(email=email, name=name, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n user.set_permissions(extra_fields.get('role'))\n return user", "def create_user(self, email, password=None):\n if not email:\n raise ValueError('Users must have an email address')\n\n user = self.model(\n email=self.normalize_email(email),\n )\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None):\n if not email:\n raise ValueError(\"Users must have an email address\")\n\n user = self.model(\n email=self.normalize_email(email),\n )\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None):\n if not email:\n raise ValueError('Users must have an email address')\n\n user = self.model(\n email=self.normalize_email(email),\n )\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None, **extra_fields):\n now = timezone.now()\n if not email:\n raise ValueError('The given email must be set')\n email = CBUserManager.normalize_email(email)\n user = self.model(email=email,\n is_staff=False, is_active=True, is_superuser=False,\n last_login=now, date_joined=now, **extra_fields)\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, password=None, **extra_fields):\n now = timezone.now()\n if not email:\n raise ValueError('The given email must be set')\n email = CBUserManager.normalize_email(email)\n user = self.model(email=email,\n is_staff=False, is_active=True, is_superuser=False,\n last_login=now, date_joined=now, **extra_fields)\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(email, password):\n try:\n User(email=email, password=password)\n except IntegrityError:\n print('Error: Duplicate email address')", "def create_user(self, email, first_name, last_name, password=None):\n if not email:\n raise ValueError('User must have an email address')\n\n # normalizing email for standarization\n email = self.normalize_email(email) \n # creating user model that user manager is representing\n user = self.model(email=email, first_name=first_name, last_name=last_name)\n # Encrypting password using method of AbstractBaseUserClass\n user.set_password(password)\n # self._db to save to any database \n user.save(using=self._db)\n\n return user", "def create_user(self, email, name, password=None):\n try:\n email = self.normalize_email(email)\n user = self.model(email=email, name=name)\n user.set_password(password)\n # This saves the password as hash object\n user.save(using=self._db)\n # Since there can be many dbs in our app, the\n # best practice is to save the user in current db.\n return user\n except Exception as e:\n raise", "def create_user(self,email,password=None, **extra_fields):\n\n if not email: \n raise ValueError('Users must have an email address')\n #sets the email field of your user model, this is done on the model itself because there are no functions to change it.\n user = self.model(email=self.normalize_email(email), **extra_fields) \n user.set_password(password)\n user.save(using=self._db) #save using the defualt database in the settings.py file.\n\n return user", "def create_user(self, email, username, password=None):\n if not email:\n raise ValueError('Users must 
have an email address')\n if not username:\n raise ValueError('Users must have a username')\n \n user = self.model(email = self.normalize_email(email),\n username = username)\n \n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, email, username, password=None):\n if not email:\n raise ValueError('The given email must be set')\n if not username:\n raise ValueError('The given username must be set')\n \n user = self.model(email=self.normalize_email(email), username=username)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_user(self, username, email, password=None):\n\n if not username:\n raise ValueError('Users must have an username')\n if not email:\n raise ValueError('Users must have an email address')\n\n user = self.model(\n username=username,\n email=self.normalize_email(email),\n )\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_a_user(self, username='fry', email='[email protected]', password='Qwerty!234'):\n user = User.objects.create_user(username, email, password)\n user.save()\n return user", "def create(cls, name, username, email, password):\n new_user = cls(name=name,\n username=username,\n email=email\n )\n new_user.password = bcrypt.generate_password_hash(\n password).decode('utf-8')\n\n db.session.add(new_user)\n db.session.commit()\n\n return new_user", "def create_user(self, email=None, name=None, password=None, phone=None):\n # if not email:\n # raise ValueError('Users must have an email address')\n\n user = self.model(\n email=email,\n name=name,\n phone=phone\n )\n\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create_new_user(cls, user_email, user_password, user_phone):\n\n new_user = User(email=user_email, password=user_password, mobile_phone=user_phone)\n\n db.session.add(new_user)\n db.session.commit()\n\n print \"Successfully added new user with the email: %s\" % user_email", "def create_user(self, name, email, password):\n\t\tif not name:\n\t\t\traise ValueError('You forgot to enter a name!')\n\t\tif not email:\n\t\t\traise ValueError('You forgot to enter an email address!')\n\t\tif not password:\n\t\t\traise ValueError('You forgot to enter a password!')\n\t\ttry:\n\t\t\tvalidate_email(email)\n\t\texcept ValidationError:\n\t\t\traise ValueError('The email address entered is invalid.')\n\t\tif User.objects.filter(email=email).count() > 0:\n\t\t\traise ValueError('The email address entered is already registered.')\n\t\tif len(password) < 8:\n\t\t\traise ValueError('The password must be at least 8 characters.')\n\t\t\n\t\tuser = self.model(\n\t\t\tname=name,\n\t\t\temail=self.normalize_email(email),\n\t\t)\n\t\t\n\t\tuser.set_password(password)\n\t\tuser.save(using=self._db)\n\t\treturn user" ]
[ "0.84163386", "0.8407691", "0.8396383", "0.83900774", "0.83900774", "0.83900774", "0.83818454", "0.83800155", "0.83791924", "0.83788806", "0.83781564", "0.83751047", "0.83726776", "0.83633083", "0.83633083", "0.83633083", "0.8360308", "0.8360308", "0.8347122", "0.8345504", "0.8345504", "0.8345504", "0.8345504", "0.8345504", "0.8345504", "0.8345504", "0.8345504", "0.8334452", "0.83250177", "0.83218974", "0.83186316", "0.831837", "0.83115476", "0.83111054", "0.8309033", "0.8303709", "0.8302135", "0.8295237", "0.8281763", "0.82788795", "0.82738054", "0.82665956", "0.8256439", "0.82458824", "0.82306653", "0.82306653", "0.82306653", "0.82291484", "0.8223603", "0.822344", "0.82220304", "0.8218027", "0.8213622", "0.821293", "0.82121533", "0.8196599", "0.8183104", "0.8181538", "0.81753594", "0.81724024", "0.8167956", "0.8149421", "0.81391144", "0.813376", "0.813038", "0.812851", "0.8127302", "0.81178594", "0.8114436", "0.81122905", "0.81001174", "0.8080533", "0.80795807", "0.80789137", "0.8075503", "0.80712754", "0.8053893", "0.8049194", "0.80441505", "0.80298764", "0.8016008", "0.8014379", "0.80139726", "0.8011292", "0.79918736", "0.7989007", "0.79844564", "0.7980518", "0.7980518", "0.7978378", "0.79774547", "0.797169", "0.7970018", "0.79672307", "0.79640156", "0.79614455", "0.79570544", "0.7954799", "0.79524994", "0.7950434", "0.79418916" ]
0.0
-1
Returns the short name for the user required for admin.
def get_short_name(self):
    return self.full_name.split(' ')[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n # The user is identified by the email address\n return self.email", "def get_short_name(self):\n # The user is identified by their email address\n return self.first_name", "def get_short_name(self):\n return f\"{self.first_name} {self.last_name[:1]}\" if self.first_name else self.username", "def admin_username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"admin_username\")", "def admin_username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"admin_username\")", "def admin_username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"admin_username\")", "def admin_username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"admin_username\")", "def get_short_name(self):\n\t\treturn self.email", "def get_user_display_name(self):\n return self.user.get_full_name() or self.user.get_username()", "def user_name(self):\n return lamin_user_settings().name", "def get_full_name(self):\n return self.username", "def get_full_name(self):\n return self.username", "def get_displayname(self):\n return self.full_name or self.user.username", "def get_short_name(self):\n\n return self.email", "def get_admin_username(self) -> str:\n # read the original value passed by the command\n admin_username = self.raw_param.get(\"admin_username\")\n # try to read the property value corresponding to the parameter from the `mc` object\n if (\n self.mc and\n self.mc.linux_profile and\n self.mc.linux_profile.admin_username is not None\n ):\n admin_username = self.mc.linux_profile.admin_username\n\n # this parameter does not need dynamic completion\n # this parameter does not need validation\n return admin_username", "def full_name_short(self):\n return \"{}. 
{}\".format(str(self.user.first_name)[:1], self.user.last_name)", "def full_name(self):\n return self.user.get_full_name() or None", "def display_name(self) -> str:\n return self.requester.username", "def name(self) -> str:\n return self.user.name", "def get_user_fullname(self):\n return self.applicant.userprofile.display_name()", "def get_short_name(self):\r\n return self.first_name", "def get_name(self):\n return self.user.username if self.user.username else self.user.email", "def get_short_name(self) -> str:\n return self.first_name", "def user_name(self) -> str:\n return pulumi.get(self, \"user_name\")", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def get_short_name(self):\n return self.first_name", "def user_name(self):\n return self._user_name", "def get_user_name(self):\n full_name = f'{self.f_name} {self.l_name}'\n return full_name", "def shortname(self):\n return self.get(\"shortName\")", "def get_short_name(self):\n\n return self.first_name", "def __str__(self):\n return self.user.get_full_name()", "def user_name(self):\n\n return self._user_name", "def _get_user_name(self):\n if self.runtime.get_real_user is None:\n return 'staff'\n else:\n return self.runtime.get_real_user(self.runtime.anonymous_student_id).username", "def short_name(self):\n return self.get(\"short_name\", decode=True)", "def full_name(self):\n return \"{} {}\".format(self.user.first_name, self.user.last_name)", "def short_name(self) -> str:\r\n\t\treturn f'{self.last_name} {self.first_name}'", "def get_short_name(self):\n return self.last_name", "def admin_site_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"admin_site_name\")", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def get_short_name(self):\n return self.name", "def admin_user(self) -> pulumi.Input['LabVirtualMachineAdminUserArgs']:\n return pulumi.get(self, \"admin_user\")", "def user_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"user_name\")", "def get_short_name(self):\r\n return self.name", "def getName(self):\n return self.__username", "def admin_group_name(self):\n return self.short_name+\"_admins\"", "def username(self):\n return self.user.username", "def getShortName(self) -> str:\n return self.short_name", "def get_full_name(self):\n # The user is identified by their email address\n return self.first_name+' '+self.last_name", "def __str__(self) -> str:\n\n 
return self.user.get_full_name()", "def short_displayname(self):\n return self.get_short_displayname()", "def get_short_name(self):\n\n return self.name", "def get_short_name(self):\n\n return self.name", "def get_short_name(self):\n\n return self.name", "def getUserName(self):\n userType = self.env['res.users']\n \n uiUser = userType.browse(self._uid)\n return uiUser.name", "def get_username(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip()[0:1])\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def user_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"user_name\")", "def username(self) -> str:", "def username(self) -> str:", "def getUserName(self):\n user = User.by_id(self.user_id)\n return user.name", "def get_name(self):\n user = self.user\n name = \"%s %s\" % (user.first_name, user.last_name)\n name = name.strip()\n\n return self.display_name or name or user.email or user.username", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_name\")", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_name\")", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_name\")", "def full_name(self, obj: User) -> str:\n return obj.get_full_name()", "def get_project_admin_instance_name(self):\n\n return get_project_admin_instance_name(self.short_name)", "def get_fullname(self):\n if self.controller.oem_config:\n return 'OEM Configuration (temporary user)'\n return self.fullname", "def get_user_fullname(self):\n member = self.get_user()\n if member:\n return member.getProperty('fullname')", "def get_full_name(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip())\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def get_current_user_full_name(self):\n user_service = self.runtime.service(self, 'user')\n xb_user = user_service.get_current_user()\n\n return xb_user.full_name", "def super_user(self) -> Optional[str]:\n return pulumi.get(self, \"super_user\")", "def username(self, instance):\r\n return instance.user.username", "def __str__(self):\n return \"{}\".format(self.user.username)", "def get_full_name(self):\n full_name = f'{self.first_name} {self.last_name}' if self.first_name and self.last_name else self.username\n return full_name.strip()", "def get_username(self):\n return str(getattr(self, self.USERNAME_FIELD))", "def get_info_admin(self):\n return self.get_info(\"HS_ADMIN\")" ]
[ "0.84520036", "0.84520036", "0.84520036", "0.81706136", "0.7997721", "0.78412324", "0.774481", "0.76969534", "0.76969534", "0.76969534", "0.758758", "0.7569117", "0.753553", "0.7458962", "0.7458962", "0.7417198", "0.7403298", "0.7393031", "0.73588586", "0.73505044", "0.73087585", "0.72606295", "0.72172624", "0.7210393", "0.7206011", "0.72034055", "0.717049", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7169244", "0.7115409", "0.7071148", "0.70709324", "0.70518154", "0.69558394", "0.6946564", "0.694624", "0.69187164", "0.6917904", "0.6894817", "0.688783", "0.68644816", "0.68593043", "0.68593043", "0.68593043", "0.68593043", "0.68593043", "0.68593043", "0.68593043", "0.68593043", "0.68579996", "0.68527144", "0.6840197", "0.6830803", "0.6830216", "0.6815217", "0.68107843", "0.68106055", "0.67910576", "0.6786491", "0.6780574", "0.6759665", "0.6759665", "0.6733477", "0.67291814", "0.6723451", "0.6718976", "0.6718976", "0.67142767", "0.6707573", "0.67048216", "0.67048216", "0.67048216", "0.66937655", "0.66775954", "0.66745406", "0.6663346", "0.666153", "0.66551757", "0.66543615", "0.6645982", "0.6636572", "0.66232544", "0.65932614", "0.657442" ]
0.6856764
66
Checks whether the user has activated their account.
def is_pending_activation(self):
    if (self.auth_token_is_used and self.is_active):
        return False
    else:
        return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_active(self, user):\r\n if not self.require_active:\r\n # Ignore & move on.\r\n return True\r\n\r\n return user.is_active", "def user_is_activated(self, user_name):\n return not self._simultanious_log_ins and \\\n user_name in self._active_users_names", "def test_activate_user(self):\n activated_user = (RegistrationProfile.objects\n .activate_user(self.activation_key))\n self.assertTrue(activated_user.registrationprofile.activated)\n self.assertFalse(activated_user.is_active)", "def activate_user(self, user):\n if not user.active:\n user.active = True\n return True\n return False", "def activate_user(self, email):\r\n activation_key = Registration.objects.get(user__email=email).activation_key\r\n # and now we try to activate\r\n check_for_get_code(self, 200, reverse('activate', kwargs={'key': activation_key}))\r\n # Now make sure that the user is now actually activated\r\n self.assertTrue(User.objects.get(email=email).is_active)", "def test_activate_active_user(self):\n activate_user(self.user, self.request)\n self.assertEqual(self.user.is_active, True)", "def is_active(self):\n return self.status == ACTIVE_USER", "def is_active(self):\n return self.user.is_active", "def activate_user(self, activation_key):\n if SHA1_RE.search(activation_key):\n try:\n profile = RegistrationProfile.objects.get(activation_key=activation_key)\n except self.model.DoesNotExist:\n return False\n if not profile.activation_key_expired():\n user = profile.user\n user.is_active = True\n user.save()\n profile.activation_key = \"ALREADY_ACTIVATED\"\n profile.save()\n return user\n\n return False", "def is_activated(self):\n return self._activated", "def activate_user(self, user):\n if not user.active:\n user.active = True\n # noinspection PyUnresolvedReferences\n self.save(user)\n return True\n\n return", "def activate_user(self, activation_key):\n # Make sure the key we're trying conforms to the pattern of a\n # SHA1 hash; if it doesn't, no point trying to look it up in\n # the database.\n try:\n profile = self.get(admin_key=activation_key)\n except self.model.DoesNotExist:\n return False, False\n user = profile.user\n activated = False\n if not user.is_active:\n user.is_active = True\n user.save()\n activated = True\n return (activated, user)", "def is_active(self):\n if self.wallet < 25:\n return True\n else:\n return False", "def activate_user(self, activation_key):\n # Make sure the key we're trying conforms to the pattern of a\n # SHA1 hash; if it doesn't, no point even trying to look it up\n # in the DB.\n if SHA1_RE.search(activation_key):\n try:\n user_profile = self.get(activation_key=activation_key)\n except self.model.DoesNotExist:\n return False\n if not user_profile.activation_key_expired():\n # Account exists and has a non-expired key. 
Activate it.\n user = user_profile.user\n user.is_active = True\n user.save()\n return user\n return False", "def test_activation_already_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n profile = self.registration_profile.objects.get(user=new_user)\n _, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(activated)", "def activate_user(self, activation_key):\r\n # Make sure the key we're trying conforms to the pattern of a\r\n # SHA1 hash; if it doesn't, no point trying to look it up in\r\n # the database.\r\n if SHA1_RE.search(activation_key):\r\n try:\r\n profile = self.get(activation_key=activation_key)\r\n except self.model.DoesNotExist:\r\n return False\r\n if not profile.activation_key_expired():\r\n user = profile.user\r\n user.is_active = True\r\n user.save()\r\n profile.activation_key = \"ALREADY_ACTIVATED\"\r\n profile.save()\r\n return user\r\n return False", "def test_valid_activation(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n self.assertIsInstance(user, UserModel())\n self.assertEqual(user.id, new_user.id)\n self.assertFalse(user.is_active)\n self.assertTrue(activated)\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertTrue(profile.activated)", "def test_valid_activation(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n self.assertIsInstance(user, UserModel())\n self.assertEqual(user.id, new_user.id)\n self.assertTrue(user.is_active)\n self.assertTrue(activated)\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertTrue(profile.activated)", "def test_activation_already_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertEqual(user, new_user)\n self.assertFalse(activated)", "def activate_user(self, username):\n args = parser_activate.parse_args()\n isActive = request.json.get('isactive')\n\n query = \"\"\"UPDATE users SET isactive=%s WHERE username=%s\"\"\"\n values = isActive, username\n\n conn = self.db\n cursor = conn.cursor()\n cursor.execute(query, values)\n conn.commit()\n return True", "def activate_user(cls, activation_key):\n #from registration.signals import user_activated\n \n # Make sure the key we're trying conforms to the pattern of a\n # SHA1 hash; if it doesn't, no point trying 
to look it up in\n # the database.\n db = DB_Session()\n if SHA1_RE.search(activation_key):\n query = db.query(RegistrationProfile)\n profile = query.filter(RegistrationProfile.activation_key == activation_key).one()\n if not profile:\n return False\n if not profile.activation_key_expired():\n user = profile.user\n user.is_active = 1\n profile.activation_key = RegistrationProfile.ACTIVATED\n db.flush()\n db.commit()\n db.close()\n #user_activated.send(sender=self.model, user=user)\n return user\n return False", "def is_invited_pending_activation(self):\n if self.registration_method == self.INVITED \\\n and self.is_pending_activation():\n return True\n else:\n return False", "def is_active_user(self):\n\n return self.is_active", "def check_account_status(request):\n\n user = request.user\n\n if not user.is_authenticated():\n return {\n 'current_user': user,\n 'check_account_status_url': reverse('check_account_status'),\n }\n\n session = request.session\n\n flag = session.get('show_email_confirmation_dialog', True)\n show = not user.has_activated_account and flag\n session['show_email_confirmation_dialog'] = False\n\n # We don't want so show email confirmation when use is trying to buy a ticket.\n if 'payment-details' in request.path:\n show = False\n\n return {\n 'current_user': user,\n 'show_email_confirmation_dialog': False,\n 'check_account_status_url': reverse('check_account_status'),\n }", "def activate_account(self, activation_key):\n try:\n registration_profile = self.get(activation_key=activation_key)\n except self.model.DoesNotExist:\n return None\n\n if not registration_profile.is_expired():\n user = registration_profile.user\n user.is_active = True\n user.save()\n registration_profile.delete()\n return user\n else:\n return None", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def activate(request, activation_key):\n profile = get_object_or_404(User, activation_key=activation_key)\n if profile.akey_expires < timezone.now():\n return render('user_account/activate.html', {'expired': True})\n\n profile.save(update_fields=['active', 'activation_key'])\n return render(\n 'user_account/activate.html',\n {'success': True, 'name': profile.name + \" \" + profile.surname}\n )", "def account_activate(request, uidb64, token):\r\n try:\r\n # decode the user's id and get the user by id.\r\n user_id = smart_str(urlsafe_base64_decode(uidb64))\r\n user = get_object_or_404(User, id=user_id)\r\n if user.is_active:\r\n # Display already activated account message\r\n messages.success(request, f'Your Account already activated. You can login.', extra_tags='activation-valid')\r\n # check if the token is valid.\r\n elif account_activation_token.check_token(user, token):\r\n user.is_active = True\r\n # user.previously_logged_in = True\r\n user.save()\r\n # Display activation success message\r\n messages.success(request, f'Your Account has been activated successfully. Now you can login.', extra_tags='activation-valid') \r\n else:\r\n # Display error message.\r\n messages.error(request, f'The activation link is invalid. Please request a new one.', extra_tags='activation-invalid') \r\n except DjangoUnicodeDecodeError:\r\n # Display error message.\r\n messages.error(request, f'The activation link is invalid. 
Please request a new one.', extra_tags='activation-invalid') \r\n return redirect('accounts:login')", "def test_user_activation(self):\n user = User.objects.get()\n response = self.client.get(reverse('accounts:user-activate',\n kwargs={'uidb64': urlsafe_base64_encode(force_bytes(user.pk)),\n 'token': account_activation_token.make_token(user)}))\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def is_activated(self):\n return is_in_radius(self, self.target_player, self.working_radius)", "def testInitialUserInactivated(self):\r\n u = User()\r\n u.email = gen_random_word(10)\r\n DBSession.add(u)\r\n\r\n self.assertEqual(\r\n False,\r\n u.activated,\r\n 'A new signup should start out deactivated by default')\r\n self.assertTrue(\r\n u.activation.code is not None,\r\n 'A new signup should start out as deactivated')\r\n self.assertEqual(\r\n 'signup',\r\n u.activation.created_by,\r\n 'This is a new signup, so mark is as thus')", "def activate(self, *args, **kwargs):\n username = self.validate_key(kwargs.get(\"activation_key\"))\n user = self.get_user(username)\n user.is_active = True\n user.save()\n return user", "def check_user_exists(self):\n is_exists = False\n if auth.UserInfo.objects.filter(\n user_id__username=self.username,\n is_active=True).exists():\n is_exists = True\n return is_exists", "def confirm_login_allowed(self, user):\r\n if not user.is_active:\r\n raise forms.ValidationError(\r\n self.error_messages['inactive'],\r\n code='inactive',\r\n )", "def activate(request, activation_key, template_name='registration/activate.html'):\n activation_key = activation_key.lower() # Normalize before trying anything with it.\n account = RegistrationProfile.objects.activate_user(activation_key)\n account.is_active = True\n account.save()\n return render(request, template_name,\n { 'account': account,\n 'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS })", "def _checkUserInactive(username,self):\r\n active = False\r\n user = _findUser(username)\r\n \r\n if user is not None:\r\n active = user.getIsActive()\r\n \r\n return active", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def activation_key_expired(self):\r\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\r\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\r\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def activate(request, uidb64, token):\r\n\ttry:\r\n\t\tuid = force_text(urlsafe_base64_decode(uidb64))\r\n\t\tuser = User.objects.get(pk=uid)\r\n\texcept (TypeError, ValueError, OverflowError, User.DoesNotExist):\r\n\t\tuser = None\r\n\r\n\tif user is not None and account_activation_token.check_token(user, token):\r\n\t\t# User activated and redirected to the homepage\r\n\t\tuser.is_active = True\r\n\t\tuser.profile.email_confirmed = 
True\r\n\t\tuser.save()\r\n\t\tlogin(request, user, backend='django.contrib.auth.backends.ModelBackend')\r\n\t\tgames = Game.objects.all()\r\n\t\treturn redirect('/', {'games': games, 'MEDIA_URL': settings.MEDIA_URL})\r\n\telse:\r\n\t\treturn render(request, 'account_activation_invalid.html')", "def test_activation_deactivated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n # Deactivate the new user.\n new_user.is_active = False\n new_user.save()\n\n # Try to activate again and ensure False is returned.\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(activated)", "def activation_expired(self):\n return self.date_joined + timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS) < timezone.now()", "def activated_user(self):\n user = self.signup_user_two()\n user.is_active = True\n user.save()\n return user", "def activateWebAppUser( self, username, activation_code ):\n try:\n con = self.getMetadataDatabaseConnection()\n user_data = con.cursor()\n\n con.cursor().callproc('verify_user_activation_code', [username, activation_code, user_data])\n row = user_data.fetchone()\n if row:\n con.cursor().callproc('activate_user_account', [username])\n return True\n else:\n return False\n except Exception, e:\n print 'Exception caught: %s.\\nThe error is: %s' % (type(e), e)\n return False", "def activate(request, uidb64, token):\n try:\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n except(TypeError, ValueError, OverflowError, User.DoesNotExist):\n user = None\n if user is not None and account_activation_token.check_token(user, token):\n user.is_active = True\n user.save()\n return render(request, 'accounts/active_done.html')\n else:\n return HttpResponse('Activation link is invalid!')", "def is_active(self) -> bool:", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def test_func(self):\n return self.request.user.is_active # any active user", "def confirm(id):\n #: get resources\n user = User.query.get_or_404(id)\n service = SignUpService(user)\n input_token = request.args['token']\n\n #: active current account\n try:\n service.active(input_token)\n except TokenUsedError:\n message = _(u\"The account had been actived.\")\n return render_template(\"confirm-failed.html\", message=message), 403\n except TokenWrongError:\n message = _(u\"The active token is invalid.\")\n return render_template(\"confirm-failed.html\", message=message), 403\n\n #: automatic sign in\n session_login(user)\n #: output a success message\n message = _(u\"The account has been actived successfully.\")\n return render_template(\"confirm-success.html\", message=message)", "def activate(self):\n if not self.is_active:\n self.is_active = True\n self.activated_at = datetime.datetime.utcnow()\n import messaging # avoid circular import\n messaging.send_activated_emails(self)\n self.save()", "def activate_account(request, key):\r\n r = Registration.objects.filter(activation_key=key)\r\n if len(r) == 1:\r\n user_logged_in = request.user.is_authenticated()\r\n already_active 
= True\r\n if not r[0].user.is_active:\r\n r[0].activate()\r\n already_active = False\r\n\r\n # Enroll student in any pending courses he/she may have if auto_enroll flag is set\r\n student = User.objects.filter(id=r[0].user_id)\r\n if student:\r\n ceas = CourseEnrollmentAllowed.objects.filter(email=student[0].email)\r\n for cea in ceas:\r\n if cea.auto_enroll:\r\n CourseEnrollment.enroll(student[0], cea.course_id)\r\n\r\n resp = render_to_response(\r\n \"registration/activation_complete.html\",\r\n {\r\n 'user_logged_in': user_logged_in,\r\n 'already_active': already_active\r\n }\r\n )\r\n return resp\r\n if len(r) == 0:\r\n return render_to_response(\r\n \"registration/activation_invalid.html\",\r\n {'csrf': csrf(request)['csrf_token']}\r\n )\r\n return HttpResponse(_(\"Unknown error. Please e-mail us to let us know how it happened.\"))", "def is_active(self) -> bool:\n if not self.expires_at:\n return False\n return self.expires_at > datetime.datetime.now()", "def activate_account_api():\n\n # get the data for this query\n data = request.get_json()\n if not data:\n response = jsonify({\n 'success': False,\n 'message': 'Missing request body'\n })\n response.status_code = 422\n return response\n\n # process arguments\n arg_email = data.get('email').strip().lower()\n\n # check if there is a user with this activation_link\n secret_link = data.get('secret_link')\n user = db.session.query(User).filter(\n User.activation_link == secret_link,\n ).one_or_none()\n if not user:\n response = jsonify({\n 'success': False,\n 'message': 'This activation link is no longer active. Contact your system administrator to receive a new one.'\n })\n response.status_code = 200\n return response\n\n # check if this user has already activated their account\n if user.activated:\n response = jsonify({\n 'success': False,\n 'message': 'This account has already been activated. Try forgot password to recover your password.'\n })\n response.status_code = 200\n return response\n\n # check if the correct email address was supplied\n if user.email != arg_email:\n response = jsonify({\n 'success': False,\n 'message': 'This is not the correct email for this activation link. 
Contact your system administrator to request a link for this email.'\n })\n response.status_code = 200\n return response\n\n # generate and set new password\n new_password = generate_password_hash(data.get('password'))\n user.password = new_password\n user.activated = True\n db.session.add(user)\n db.session.commit()\n\n # log that a user just activated their account\n _log('++ {} just activated their account'.format(user.email), '_signup')\n\n # return authenticated token\n token = generate_auth_token(user_id=user.user_id)\n response = jsonify({\n 'success': True,\n 'token': token\n })\n response.status_code = 200\n return response", "def confirm_login_allowed(self, user):\n # if the user has been disabled due to incorrect\n # password retries or other.\n if not user.is_active:\n return False; \n return True", "def activate(request, uidb64, token):\n try:\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n except (TypeError, ValueError, OverflowError, User.DoesNotExist):\n user = None\n\n if user is not None and account_activation_token.check_token(user, token):\n user.is_active = True\n user.profile.email_confirmed = True\n user.save()\n login(request, user)\n return redirect('home')\n else:\n return render(request, 'registration/activation_invalid.html')", "def is_profile_complete(self):\n return bool(self.fullname and self.username and self.email)", "def _activate_user(self, email):\r\n activation_key = registration(email).activation_key\r\n\r\n # and now we try to activate\r\n resp = self.client.get(reverse('activate', kwargs={'key': activation_key}))\r\n return resp", "def test_non_activated_account(self):\r\n # When all the conditions are satisfied, the account should be deleted.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 1,\r\n len(activations),\r\n 'We should have a total of 1 activation: ' + str(len(activations)))\r\n self.assertEqual(\r\n 2,\r\n len(users),\r\n 'We should have a total of 2 users: ' + str(len(users)))\r\n activations[0].valid_until = datetime.utcnow() - timedelta(days=35)\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 0,\r\n len(activations),\r\n 'There should be no activations left')\r\n self.assertEqual(\r\n 1,\r\n len(users),\r\n 'We should have a total of 1 user still: ' + str(len(users)))\r\n # When the account is activated, it should not be deleted.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 1,\r\n len(activations),\r\n 'We should have a total of 1 activation: ' + str(len(activations)))\r\n self.assertEqual(\r\n 2,\r\n len(users),\r\n 'We should have a total of 2 users: ' + str(len(users)))\r\n users[1].activated = True\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 1,\r\n len(activations),\r\n 'We should have a total of 1 activation still')\r\n self.assertEqual(\r\n 2,\r\n len(users),\r\n 'We should have a total of 2 users still: ' + str(len(users)))\r\n # When the account last login is not None, it should not be deleted.\r\n # This happens when a user forgets his/her password.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = 
Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 2,\r\n len(activations),\r\n 'We should have a total of 2 activations')\r\n self.assertEqual(\r\n 3,\r\n len(users),\r\n 'We should have a total of 3 users: ' + str(len(users)))\r\n users[2].last_login = datetime.utcnow()\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 2,\r\n len(activations),\r\n 'We should have a total of 2 activations still')\r\n self.assertEqual(\r\n 3,\r\n len(users),\r\n 'We should have a total of 3 users still: ' + str(len(users)))\r\n # The account should not be deleted before 30 days since signup.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 3,\r\n len(activations),\r\n 'We should have a total of 3 activations')\r\n self.assertEqual(\r\n 4,\r\n len(users),\r\n 'We should have a total of 4 users: ' + str(len(users)))\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 3,\r\n len(activations),\r\n 'We should have a total of 3 activations still')\r\n self.assertEqual(\r\n 4,\r\n len(users),\r\n 'We should have a total of 4 users still')\r\n # The account details should be shown if it is not asked to delete.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 4,\r\n len(activations),\r\n 'We should have a total of 4 activations')\r\n self.assertEqual(\r\n 5,\r\n len(users),\r\n 'We should have a total of 5 users: ' + str(len(users)))\r\n account_signup = datetime.utcnow() - timedelta(days=35)\r\n activations[3].valid_until = account_signup\r\n account_details = UserMgr.non_activated_account(delete=False)\r\n self.assertEqual(\r\n email,\r\n account_details[0].email)\r\n self.assertEqual(\r\n False,\r\n account_details[0].activated)\r\n self.assertEqual(\r\n u'testcase',\r\n account_details[0].invited_by)", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def is_active(self):\n return True", "def IsActive(self):\n return True", "def has_permission(self, request):\n return request.user.is_active", "def has_permission(self, request):\n return request.user.is_active", "def has_permission(self, request):\n return request.user.is_active", "def check_activated(self, endpoint):\n _autoactivate = False\n endpoint_data = self.get_endpoint_data(endpoint=endpoint, fields=\"activated,expires_in\")\n if not endpoint_data:\n return 0\n _endpoint_activated = endpoint_data[\"activated\"]\n\n if _endpoint_activated:\n _expires_in = int(endpoint_data[\"expires_in\"])\n if _expires_in < self.settings.globus_min_activation_time:\n _autoactivate = True\n else:\n 
_autoactivate = True\n if _autoactivate:\n logger.info(\"Endpoint %s not activated, attempting autoactivate\" % endpoint)\n _autoactivate_data = self.autoactivate(endpoint=endpoint)\n _expires_in = int(_autoactivate_data[\"expires_in\"])\n\n if not _expires_in:\n return 0\n else:\n return _expires_in", "def active(self):\n if self._active is not None:\n return self._active\n # Try to get it from the userprofile\n try:\n self._active = self.userprofile.user.is_active\n except UserProfile.DoesNotExist:\n # User profile does not exist.\n # The default value for active is False.\n self._active = False\n return self._active", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == RegistrationProfile.ACTIVATED or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def test_active_account_and_expired_accountactivation_key_expired(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n profile.refresh_from_db()\n self.assertTrue(profile.activation_key_expired())", "def active(self) -> bool:\n return self.relay(\"active\")", "def test_resend_activation_email_activated_user(self):\n user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n\n profile = self.registration_profile.objects.get(user=user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertTrue(user.is_active)\n self.assertTrue(activated)\n\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def user_auth(request):\n if request.user.is_authenticated:\n user = User.objects.get(email=request.user.email)\n if UserInformation.objects.filter(user=user).exists():\n return True\n return False", "def account_activation_sent(request):\n current_user = request.user\n if current_user.is_authenticated():\n return HttpResponseRedirect('/')\n return render(request, 'registration/activation_complete.html')", "def activate(self):\r\n if self.activation_code == '':\r\n raise ValidationError('The member is already activated')\r\n signer = TimestampSigner()\r\n signer.unsign(self.activation_code, max_age=timedelta(days=2))\r\n self.hidden = False\r\n self.activation_code = ''\r\n self.joined_date = timezone.now()\r\n self.save()", "def check_if_event_confirmed_user(request):\n\n if request.user.is_anonymous():\n user_activated = False\n else:\n user_activated = request.user.has_activated_account\n\n try:\n return {'is_event_user_not_confirmed': not user_activated}\n except Exception as e:\n print 'Exception!!!'\n return {'is_event_user_not_confirmed': False}", "def is_active(self) -> bool:\n return self.active == \"active\"", "def checkbalance(self):\n logging.debug('Checked user balance')", "def test_expired_activation(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n 
site=Site.objects.get_current(), **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n self.assertIs(user, False)\n self.assertFalse(activated)\n\n new_user = UserModel().objects.get(username='alice')\n self.assertFalse(new_user.is_active)\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertFalse(profile.activated)", "def get_is_active(self, instance):\n return instance.check_finish()", "def check_user(self):\n try:\n if self.get_customer()[0][0] == self.dni:\n return True\n else:\n return False\n except:\n return False", "def test_activate_authenticated(client):\n employee = factories.EmployeeFactory(\n company=factories.CompanyFactory(),\n account_status=False\n )\n with client.session_transaction() as session:\n session[\"user_id\"] = employee.id\n g.user = employee\n response = client.post(\"/auth/activate\")\n assert b\"<h1>Successfully activated your account.</h1>\" in response.data\n assert employee.account_status\n assert response.status_code == HTTPStatus.OK" ]
[ "0.77060133", "0.7657495", "0.74019575", "0.7384715", "0.724473", "0.72160727", "0.72082347", "0.70193785", "0.6999122", "0.6985526", "0.6979906", "0.6939069", "0.6910385", "0.6892355", "0.68483496", "0.68368363", "0.67889947", "0.67825264", "0.675138", "0.67420274", "0.6720102", "0.67127573", "0.6710836", "0.6663598", "0.66325", "0.6612243", "0.6603795", "0.65992975", "0.6574003", "0.65713865", "0.6567642", "0.65476674", "0.6544167", "0.65243393", "0.65209246", "0.6512811", "0.6505811", "0.6505811", "0.6505811", "0.6505811", "0.6505811", "0.64810187", "0.64762187", "0.64716285", "0.64585197", "0.64580697", "0.64546835", "0.6451918", "0.6450692", "0.6437787", "0.64210397", "0.64145565", "0.6410186", "0.64053994", "0.6389981", "0.6387844", "0.6375322", "0.63701797", "0.636699", "0.6364897", "0.63484645", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6343443", "0.6340824", "0.6331435", "0.6331435", "0.6331435", "0.63236827", "0.631264", "0.6302488", "0.6292525", "0.6291548", "0.62723595", "0.62699264", "0.626196", "0.6261865", "0.62617266", "0.6261544", "0.62469417", "0.6240871", "0.6238582", "0.6235353", "0.62265134" ]
0.72469395
4
Checks whether the user is an invited user who has not yet activated their account.
def is_invited_pending_activation(self):
    if self.registration_method == self.INVITED \
            and self.is_pending_activation():
        return True
    else:
        return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_not_approved_user(self):\n (_,\n joining_user_id,\n conversation_id,\n _) = self.setup_invites(is_approved=None)\n self.set_session_cookie(joining_user_id, conversation_id)\n self.set_user_cookie(joining_user_id, conversation_id)\n uri = '/status/{}/{}'.format(conversation_id, joining_user_id)\n rsp = self.client.post(uri,\n data={'public_key': '',\n 'last_message_seen_id': 0})\n\n rsp_json = json.loads(rsp.data)\n self.assertFalse(rsp_json['success'])", "def test_not_approved_user(self):\n (_,\n joining_user_id,\n conversation_id,\n _) = self.setup_invites(is_approved=False)\n self.set_session_cookie(joining_user_id, conversation_id)\n self.set_user_cookie(joining_user_id, conversation_id)\n uri = '/status/{}/{}'.format(conversation_id, joining_user_id)\n rsp = self.client.post(uri,\n data={'public_key': '',\n 'last_message_seen_id': 0})\n\n rsp_json = json.loads(rsp.data)\n self.assertFalse(rsp_json['success'])", "def __ne__(self, other: 'InvitedUser') -> bool:\n return not self == other", "def has_invites(self):\r\n return self.invite_ct > 0", "def is_invincible(self):\n\t\treturn self._invincible", "def testHasNoInvites(self):\r\n u = User()\r\n u.invite_ct = 0\r\n self.assertFalse(u.has_invites(), 'User should have no invites')\r\n self.assertFalse(\r\n u.invite('[email protected]'), 'Should not be able to invite a user')", "def test_invited(self) -> None:\n\n self._perform_background_initial_update()\n\n u1 = self.register_user(\"u1\", \"pass\")\n u1token = self.login(\"u1\", \"pass\")\n r1 = self.helper.create_room_as(u1, tok=u1token)\n\n u2 = self.register_user(\"u2\", \"pass\")\n\n r1stats_ante = self._get_current_stats(\"room\", r1)\n assert r1stats_ante is not None\n\n self.helper.invite(r1, u1, u2, tok=u1token)\n\n r1stats_post = self._get_current_stats(\"room\", r1)\n assert r1stats_post is not None\n\n self.assertEqual(\n r1stats_post[\"current_state_events\"] - r1stats_ante[\"current_state_events\"],\n 1,\n )\n self.assertEqual(\n r1stats_post[\"invited_members\"] - r1stats_ante[\"invited_members\"], +1\n )", "def is_invite_only(self):\n return self._tag == 'invite_only'", "def is_invite_only(self):\n return self._tag == 'invite_only'", "def user_is_attendee(user):\n exists = check_attendee_exists(user, user)\n if exists[0]:\n return True\n return False", "def __ne__(self, other: 'InvitedUserList') -> bool:\n return not self == other", "def __ne__(self, other: 'InviteUser') -> bool:\n return not self == other", "def _checkUserInactive(username,self):\r\n active = False\r\n user = _findUser(username)\r\n \r\n if user is not None:\r\n active = user.getIsActive()\r\n \r\n return active", "def user_is_activated(self, user_name):\n return not self._simultanious_log_ins and \\\n user_name in self._active_users_names", "def verify_user_existance(self, user):\n for client in self.clients:\n if user == client.get_name():\n return True\n return False", "def test_resend_activation_email_nonexistent_user(self):\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def send_user_invitation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"send_user_invitation\")", "def send_user_invitation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"send_user_invitation\")", "def __eq__(self, other: 'InvitedUser') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == 
other.__dict__", "def get_available_invitees(self):\n return User.objects.exclude(pk=self.request.user.pk)", "def test_admin_approval_not_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n\n user = self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n self.assertIs(user, False)\n self.assertIs(profile.user.is_active, False)", "def activation_key_expired(self):\r\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\r\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\r\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == \"ALREADY_ACTIVATED\" or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n\n return (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def __ne__(self, other: 'InviteUserIamPolicy') -> bool:\n return not self == other", "def has_user(self, user): # pylint: disable=unused-argument\r\n return False", "def check_if_event_confirmed_user(request):\n\n if request.user.is_anonymous():\n user_activated = False\n else:\n user_activated = request.user.has_activated_account\n\n try:\n return {'is_event_user_not_confirmed': not user_activated}\n except Exception as e:\n print 'Exception!!!'\n return {'is_event_user_not_confirmed': False}", "def get_invincible(self):\n return self._invincible", "def test_activation_already_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n profile = self.registration_profile.objects.get(user=new_user)\n _, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(activated)", "def send_user_invitation(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"send_user_invitation\")", "def invite_user(request):\r\n params = request.params\r\n\r\n email = params.get('email', None)\r\n user = request.user\r\n\r\n if not email:\r\n # try to get it from the json body\r\n email = request.json_body.get('email', None)\r\n\r\n if not email:\r\n # if still no email, I give up!\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'username': user.username,\r\n 'error': \"Please submit an email address\"\r\n })\r\n\r\n email = email.lower()\r\n # first see if the user is already in the system\r\n exists = UserMgr.get(email=email.lower())\r\n if exists:\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'username': exists.username,\r\n 'error': \"This user is already a Bookie user!\"\r\n })\r\n\r\n new_user = user.invite(email.lower())\r\n if new_user:\r\n LOG.debug(new_user.username)\r\n # then this user is able to invite someone\r\n # log it\r\n AuthLog.reactivate(new_user.username)\r\n\r\n # and then send an email notification\r\n # @todo the email side of things\r\n settings = request.registry.settings\r\n msg = 
ActivationMsg(new_user.email,\r\n \"Enable your Bookie account\",\r\n settings)\r\n\r\n msg.send(\r\n request.route_url(\r\n 'reset',\r\n username=new_user.username,\r\n reset_key=new_user.activation.code))\r\n return _api_response(request, {\r\n 'message': 'You have invited: ' + new_user.email\r\n })\r\n else:\r\n # you have no invites\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'username': user.username,\r\n 'error': \"You have no invites left at this time.\"\r\n })", "def check_user(self):\n try:\n if self.get_customer()[0][0] == self.dni:\n return True\n else:\n return False\n except:\n return False", "def activation_expired(self):\n return self.date_joined + timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS) < timezone.now()", "def validUser(self):\n if self.state == SessionStates.LOGGED_OUT:\n return False\n\n # if self.user == None:\n # return False\n return True", "def test_registered_user_is_inactive(self):\n self.register_bob()\n the_user = User.objects.filter(username='russellszn')\n self.assertFalse(the_user[0].is_active)", "def clean(self):\n cleaned_data = super().clean()\n\n if not self.instance.is_active:\n raise forms.ValidationError('Invalid invitation.')\n\n if self.instance.email not in self.user.get_emails():\n raise forms.ValidationError(\n 'You are not invited.')\n\n return cleaned_data", "def activation_key_expired(self):\n exp_date = timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + exp_date <= datetime.now()", "def test_unexpired_account(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertFalse(profile.activation_key_expired())", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.user.date_joined + expiration_date <= datetime.datetime.now()", "def test_not_contains_user(self):\n print('(' + self.test_not_contains_user.__name__+')',\n self.test_not_contains_user.__doc__)\n # non existing doctor, it could be patient as well\n self.assertFalse(self.connection.contains_user(\n NON_EXIST_DOCTOR_USERNAME))", "def activation_key_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return self.activation_key == RegistrationProfile.ACTIVATED or \\\n (self.user.date_joined + expiration_date <= datetime.datetime.now())", "def test_expired_activation(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n self.assertIs(user, False)\n self.assertFalse(activated)\n\n new_user = UserModel().objects.get(username='alice')\n self.assertFalse(new_user.is_active)\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertFalse(profile.activated)", "def test_new_user_not_authenticated(self):\n\n setup_identity_cache()\n\n url = \"/v1/actions/InviteUser\"\n headers = {}\n data = {\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": \"test_project_id\",\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n 
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n self.assertEqual(\n response.json(), {\"errors\": [\"Credentials incorrect or none given.\"]}\n )", "def check_inflight_already_running(self, user: Identifier) -> bool:\n with self._lock:\n for flow in self.in_flight:\n if flow.requestor == user:\n return True\n return False", "def is_participant(self, user) -> bool:\n return (\n user.is_superuser\n or user.groups.filter(pk=self.participants_group.pk).exists()\n )", "def still_deciding(self):\n for player in self.players:\n if isinstance(player, user.User):\n if not player.has_played:\n return True\n return False", "def has_invited_to_poll(self, poll_id):\n\n has_invited_to_poll = False\n invited_poll = PollUser.objects.filter(user=self, poll=Poll(poll_id))\n\n if len(invited_poll) >= 1:\n has_invited_to_poll = True\n else:\n has_invited_to_poll = False\n return has_invited_to_poll", "def is_invulnerable(self) -> bool:\n return self.invul_timer != 0", "def check_active(self, user):\r\n if not self.require_active:\r\n # Ignore & move on.\r\n return True\r\n\r\n return user.is_active", "def raise_not_editable(self, viewer):\n if not self.id or viewer.has_perm(\"bookwyrm.create_invites\"):\n return\n raise PermissionDenied()", "def is_examiner(self, user_obj):\n return self.examiners.filter(pk=user_obj.pk).count() > 0", "def test_activation_already_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertEqual(user, new_user)\n self.assertFalse(activated)", "def canInvite(session):\n if session.user[\"userlevel\"] == \"admin\":\n return True\n\n dOrg = session.user[\"defaultOrganisation\"] or \"apache\"\n if session.DB.ES.exists(index=session.DB.dbname, doc_type=\"organisation\", id=dOrg):\n xorg = session.DB.ES.get(\n index=session.DB.dbname, doc_type=\"organisation\", id=dOrg\n )[\"_source\"]\n if session.user[\"email\"] in xorg[\"admins\"]:\n return True", "def checkIsEmailAvailable(self, email):\n\n return User.objects.filter(email=email).exists()", "def is_participant(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.event.participants:\n return True\n return False", "def assert_not_enrolled(self):\r\n self.assertFalse(\r\n CourseEnrollment.is_enrolled(self.ext_user, self.course.id),\r\n 'Did not expect ext_user to be enrolled in course'\r\n )", "def testInitialUserInactivated(self):\r\n u = User()\r\n u.email = gen_random_word(10)\r\n DBSession.add(u)\r\n\r\n self.assertEqual(\r\n False,\r\n u.activated,\r\n 'A new signup should start out deactivated by default')\r\n self.assertTrue(\r\n u.activation.code is not None,\r\n 'A new signup should start out as deactivated')\r\n self.assertEqual(\r\n 'signup',\r\n u.activation.created_by,\r\n 'This is a new signup, so mark is as thus')", "def user_is_examiner(userobj):\n from .assignment_group import AssignmentGroup\n return AssignmentGroup.published_where_is_examiner(userobj).exists()", "def is_pending_activation(self):\n if (self.auth_token_is_used and self.is_active):\n return False\n else:\n return True", "def already_booked(slots, 
attendees, user_name):\n already_joined = False\n for i in attendees:\n if i[\"email\"] == user_name+'@student.wethinkcode.co.za':\n already_joined = True\n\n if already_joined == True:\n return False\n else:\n return True", "def ref_user_flag(self):\n try:\n ref = User.objects.get(\n associated_emails__email__iexact=self.reference_email,\n associated_emails__is_verified=True)\n return True\n except ObjectDoesNotExist:\n return False", "def test_new_user_not_my_project(self):\n setup_identity_cache()\n\n url = \"/v1/actions/InviteUser\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": \"test_project_id\",\n \"roles\": \"member\",\n \"username\": \"[email protected]\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n data = {\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": \"test_project_id\",\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def validate_member_invite(self, invite):\n queryset = TeamMember.objects.filter(\n Q(\n team=self.team,\n invite__team=self.team,\n invite__email=invite.email,\n ),\n )\n if queryset.exists():\n raise forms.ValidationError(\n _('An invitation was already sent to this email'),\n )\n return invite", "def test_invite_ct(self):\r\n # for now just make sure we can get a 200 call on it.\r\n params = {\r\n 'api_key': self.api_key\r\n }\r\n res = self.testapp.get('/api/v1/a/accounts/invites',\r\n params=params,\r\n status=200)\r\n # we should get back tuples of username/count\r\n data = json.loads(res.body)['users']\r\n found = False\r\n invite_count = None\r\n for user, count in data:\r\n if user == u'admin':\r\n found = True\r\n invite_count = count\r\n\r\n self.assertTrue(found, \"There should be the admin user.\" + res.body)\r\n self.assertEqual(\r\n 0,\r\n invite_count,\r\n \"The admin user shouldn't have any invites.\" + res.body)", "def test_non_activated_account(self):\r\n # When all the conditions are satisfied, the account should be deleted.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 1,\r\n len(activations),\r\n 'We should have a total of 1 activation: ' + str(len(activations)))\r\n self.assertEqual(\r\n 2,\r\n len(users),\r\n 'We should have a total of 2 users: ' + str(len(users)))\r\n activations[0].valid_until = datetime.utcnow() - timedelta(days=35)\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 0,\r\n len(activations),\r\n 'There should be no activations left')\r\n self.assertEqual(\r\n 1,\r\n len(users),\r\n 'We should have a total of 1 user still: ' + str(len(users)))\r\n # When the account is activated, it should not be deleted.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 1,\r\n len(activations),\r\n 'We should have a total of 1 activation: ' + str(len(activations)))\r\n self.assertEqual(\r\n 2,\r\n len(users),\r\n 'We should have a total of 2 users: ' + str(len(users)))\r\n users[1].activated = True\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 1,\r\n len(activations),\r\n 'We should have a total of 1 activation still')\r\n 
self.assertEqual(\r\n 2,\r\n len(users),\r\n 'We should have a total of 2 users still: ' + str(len(users)))\r\n # When the account last login is not None, it should not be deleted.\r\n # This happens when a user forgets his/her password.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 2,\r\n len(activations),\r\n 'We should have a total of 2 activations')\r\n self.assertEqual(\r\n 3,\r\n len(users),\r\n 'We should have a total of 3 users: ' + str(len(users)))\r\n users[2].last_login = datetime.utcnow()\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 2,\r\n len(activations),\r\n 'We should have a total of 2 activations still')\r\n self.assertEqual(\r\n 3,\r\n len(users),\r\n 'We should have a total of 3 users still: ' + str(len(users)))\r\n # The account should not be deleted before 30 days since signup.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 3,\r\n len(activations),\r\n 'We should have a total of 3 activations')\r\n self.assertEqual(\r\n 4,\r\n len(users),\r\n 'We should have a total of 4 users: ' + str(len(users)))\r\n UserMgr.non_activated_account(delete=True)\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 3,\r\n len(activations),\r\n 'We should have a total of 3 activations still')\r\n self.assertEqual(\r\n 4,\r\n len(users),\r\n 'We should have a total of 4 users still')\r\n # The account details should be shown if it is not asked to delete.\r\n email = u'[email protected]'\r\n UserMgr.signup_user(email, u'testcase')\r\n activations = Activation.query.all()\r\n users = User.query.all()\r\n self.assertEqual(\r\n 4,\r\n len(activations),\r\n 'We should have a total of 4 activations')\r\n self.assertEqual(\r\n 5,\r\n len(users),\r\n 'We should have a total of 5 users: ' + str(len(users)))\r\n account_signup = datetime.utcnow() - timedelta(days=35)\r\n activations[3].valid_until = account_signup\r\n account_details = UserMgr.non_activated_account(delete=False)\r\n self.assertEqual(\r\n email,\r\n account_details[0].email)\r\n self.assertEqual(\r\n False,\r\n account_details[0].activated)\r\n self.assertEqual(\r\n u'testcase',\r\n account_details[0].invited_by)", "def test_resend_activation_email_activated_user(self):\n user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n\n profile = self.registration_profile.objects.get(user=user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(user.is_active)\n self.assertTrue(activated)\n\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n # Outbox has one mail, admin approve mail\n\n self.assertEqual(len(mail.outbox), 1)\n admins_emails = [value[1] for value in settings.REGISTRATION_ADMINS]\n for email in mail.outbox[0].to:\n self.assertIn(email, admins_emails)", "def is_expired(self):\n expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)\n return (self.date_joined + expiration_date <= datetime.datetime.now())", "def confirm_login_allowed(self, user):\n if not 
user.is_active:\n raise ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def inactive_to_active(sender, instance, **kwargs):\n\n if instance.is_active and DiscoUser.objects.filter(id=instance.id, is_active=False).exists():\n from discogportal.discoutils.asyncutils import create_platform_invite_instance\n from discomail.models import PlatformInvite\n if not PlatformInvite.objects.filter(user=instance).exists():\n created = True\n create_platform_invite_instance(instance, created)\n else:\n pass\n else:\n pass", "def verify_user(self):\n verified = False\n if self.user.role.role_name == \"Admin\":\n verified = True\n\n return verified", "def get_is_responded(self, obj: Vacancy) -> bool:\n request = self.context.get(\"request\")\n if not request or not request.user or request.user.is_anonymous:\n return False\n return obj.responded_users.filter(pk=request.user.pk).exists()", "def admin_non_activated(request):\r\n ret = []\r\n res = UserMgr.non_activated_account()\r\n if res:\r\n ret = [u.username for u in res]\r\n\r\n return _api_response(request, {\r\n 'count': len(ret),\r\n 'status': True,\r\n 'data': ret,\r\n })", "def verify_player_pending(self, player_email):\n try:\n self.pending_players.index(player_email)\n return True\n except ValueError:\n return False", "def test_admin_approval_nonexistent_id(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n\n user = self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n self.assertIs(user, False)", "def test_resend_activation_email_activated_user(self):\n user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n\n profile = self.registration_profile.objects.get(user=user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertTrue(user.is_active)\n self.assertTrue(activated)\n\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def test_user_invite_cant_edit_users_existing_user(self):\n project = fake_clients.FakeProject(name=\"test_project\")\n\n user = fake_clients.FakeUser(name=\"[email protected]\")\n\n setup_identity_cache(projects=[project], users=[user])\n\n url = \"/v1/actions/InviteUser\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": project.id,\n \"roles\": \"project_admin,member,project_mod\",\n \"username\": \"user\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n data = {\n \"username\": \"new_user\",\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": project.id,\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n self.assertEqual(response.json(), {\"notes\": [\"task created\"]})", "def is_eligible(self, myself):\n if self.author().screen_name == myself.screen_name:\n log_.debug(\"Not replying to my own tweets\")\n return False\n if self.is_retweet():\n log_.debug(\"Not processing pure retweets\")\n return False\n return True", "def test_user_not_in_users_table(self) -> None:\n user1 = self.register_user(\"user1\", \"pass\")\n token1 = self.login(user1, 
\"pass\")\n room = self.helper.create_room_as(user1, is_public=True, tok=token1)\n\n # Inject a join event for a user who doesn't exist\n self.get_success(inject_member_event(self.hs, room, \"@not-a-user:test\", \"join\"))\n\n # Another new user registers and joins the room\n user2 = self.register_user(\"user2\", \"pass\")\n token2 = self.login(user2, \"pass\")\n self.helper.join(room, user2, tok=token2)\n\n # The dodgy event should not have stopped us from processing user2's join.\n in_public = self.get_success(self.user_dir_helper.get_users_in_public_rooms())\n self.assertEqual(set(in_public), {(user1, room), (user2, room)})", "def set_invincible(self, status: bool):\n self._invincible = status\n if self._invincible: # if become invincible\n self._invincible_time = time.time() # record the invincible time", "def test_activation_deactivated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n # Deactivate the new user.\n new_user.is_active = False\n new_user.save()\n\n # Try to activate again and ensure False is returned.\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(activated)", "def test_resend_activation_email_expired_user(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertTrue(profile.activation_key_expired())\n\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def userExists(self, user_uuid):\n return self.getUser(user_uuid) is not None", "def isOrphaned(self):\n return len(self.user_storage.all()) == 0", "def user_present(ctx: Context, channel: TextChannel) -> bool:\n for member in channel.members:\n if member.id == ctx.author.id:\n return True\n\n return False", "def is_user_change_required(self):\n return self.__running_user != self.__desired_user", "def check_user_exists(self):\n is_exists = False\n if auth.UserInfo.objects.filter(\n user_id__username=self.username,\n is_active=True).exists():\n is_exists = True\n return is_exists", "def is_participant(self,user):\n if user.is_superuser:\n return True\n\n if user.groups.filter(name=self.participants_group_name).count() > 0:\n return True\n else:\n return False", "def test_activation_invalid_key(self):\n user, activated = self.registration_profile.objects.activate_user(\n 'foo', Site.objects.get_current())\n self.assertIs(user, False)\n self.assertFalse(activated)", "def confirm_login_allowed(self, user):\r\n if not user.is_active:\r\n raise forms.ValidationError(\r\n self.error_messages['inactive'],\r\n code='inactive',\r\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, 
user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise forms.ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def get_people_invited(self, users):\n invited = []\n for user in users:\n if Room.verify_if_is_invited(user):\n invited.append(user)\n return invited", "def is_not_admin(user):\n return not user.is_superuser", "def confirm_login_allowed(self, user):\n # if the user has been disabled due to incorrect\n # password retries or other.\n if not user.is_active:\n return False; \n return True", "def test_user_invite_cant_edit_users(self):\n project = fake_clients.FakeProject(name=\"test_project\")\n\n setup_identity_cache(projects=[project])\n\n url = \"/v1/actions/InviteUser\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": project.id,\n \"roles\": \"project_admin,member,project_mod\",\n \"username\": \"user\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n data = {\n \"username\": \"new_user\",\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": project.id,\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.json(), {\"errors\": [\"actions invalid\"]})", "def _send_existing_agent_user_invite(self):\n standard_invite = self.instance\n try:\n agent_invite = AgentUserInvite.objects.get(\n agent=self._agent_user, organisation=standard_invite.organisation\n )\n except AgentUserInvite.DoesNotExist:\n agent_invite = AgentUserInvite(\n agent=self._agent_user, organisation=standard_invite.organisation\n )\n\n agent_invite.inviter = standard_invite.inviter\n agent_invite.status = AgentUserInvite.PENDING\n agent_invite.save()\n agent_invite.send_confirmation()\n return standard_invite", "def get_user_noreq(self, request):\n item = Item.objects.get(id=request.POST['item_id'])\n target_user = User.objects.filter(email=request.POST['email'])\n if not target_user.exists():\n # In this case we don't want to return to the initial page\n return JsonResponse({\n 'msg': \"ERROR: The user doesn't exist\"\n })\n if not item.can_be_borrowed():\n return self.init_and_toast(\"ERROR: The item is not available\")\n\n borrowing = Borrowing(user=target_user.first(), item=item, borrowing_by=request.user)\n borrowing.save()\n return self.init_and_toast(\"The item has been borrowed succesfully\")" ]
[ "0.70103204", "0.6990121", "0.69247997", "0.68423045", "0.6763782", "0.6740842", "0.6381607", "0.63654363", "0.63654363", "0.6342342", "0.63128126", "0.6306948", "0.6245732", "0.62384367", "0.6224568", "0.614176", "0.61390686", "0.61390686", "0.6134246", "0.6127191", "0.61174595", "0.60785985", "0.60702974", "0.606369", "0.60599107", "0.60539013", "0.6033172", "0.60242075", "0.6021813", "0.60110843", "0.60077554", "0.59993786", "0.599758", "0.59839493", "0.5932414", "0.59086835", "0.59040934", "0.58947414", "0.58928436", "0.5891193", "0.58599705", "0.58588964", "0.5855913", "0.5843082", "0.5790849", "0.57903224", "0.5789713", "0.57883894", "0.5778439", "0.5772442", "0.5755937", "0.57546705", "0.5751965", "0.57512563", "0.57483447", "0.57341033", "0.57320875", "0.57318765", "0.57170385", "0.5712375", "0.57074577", "0.5704344", "0.57022756", "0.566334", "0.56593364", "0.5654297", "0.5649508", "0.5647462", "0.5621089", "0.5620295", "0.56200266", "0.561904", "0.5612027", "0.5610032", "0.56030226", "0.5600533", "0.5592083", "0.5589559", "0.55845106", "0.5582729", "0.55822575", "0.5577297", "0.55771047", "0.5574935", "0.5571569", "0.55653125", "0.55619603", "0.5560438", "0.5559863", "0.5558491", "0.5558491", "0.5558491", "0.5558491", "0.5558491", "0.55418664", "0.55394435", "0.5537908", "0.5524763", "0.5513511", "0.5509213" ]
0.68240666
4
Checks whether the user has requested an account and is awaiting a decision.
def is_pending_approval(self):
    if self.registration_method == self.REQUESTED \
            and self.is_pending_activation():
        return True
    else:
        return False
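A minimal usage sketch for the document above; only `registration_method`, `REQUESTED`, `is_pending_activation()`, and `is_pending_approval()` come from the row itself, while the user lookup and the `notify_moderators` helper are assumptions for illustration:

from django.contrib.auth import get_user_model  # assumed Django user model

# Hypothetical call site -- only the is_pending_approval() API is taken from the row above.
user = get_user_model().objects.get(email='applicant@example.com')
if user.is_pending_approval():
    # The account was self-requested and has not yet been activated or decided on.
    notify_moderators(user)  # hypothetical helper

Note the body could equally collapse to a single boolean expression (`return self.registration_method == self.REQUESTED and self.is_pending_activation()`); the explicit if/else is kept because that is how the dataset row reads.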
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def consent_check():\n\n auth = current.auth\n\n person_id = auth.s3_logged_in_person()\n if not person_id:\n return None\n\n has_role = auth.s3_has_role\n if has_role(\"ADMIN\"):\n required = None\n elif has_role(\"VOUCHER_ISSUER\"):\n required = [\"STORE\", \"RULES_ISS\"]\n else:\n required = None\n\n if required:\n consent = current.s3db.auth_Consent(required)\n pending = consent.pending_responses(person_id)\n else:\n pending = None\n\n return pending", "def consent_check():\n\n auth = current.auth\n\n person_id = auth.s3_logged_in_person()\n if not person_id:\n return None\n\n has_role = auth.s3_has_role\n if has_role(\"ADMIN\"):\n required = None\n elif has_role(\"VOUCHER_ISSUER\"):\n required = [\"STORE\", \"RULES_ISS\"]\n else:\n required = None\n\n if required:\n consent = current.s3db.auth_Consent(required)\n pending = consent.pending_responses(person_id)\n else:\n pending = None\n\n return pending", "def is_attempted(self):\r\n return self.attempts > 0", "def agree(self,user_input):\n response = self.classification(user_input)\n if response in [\"ack\", \"affirm\"]:\n return True\n elif response in [\"deny\", \"negate\"]:\n return False\n else:\n return response", "def is_available(self):\n if not is_optional_task_permissions_revoking_enabled():\n return False\n if not ITask.providedBy(self.context):\n return False\n if not self.context.get_review_state() in FINAL_TASK_STATES:\n return False\n if api.user.has_permission('cmf.ManagePortal'):\n return True\n issuer = self.context.get_issuer_actor()\n return issuer.identifier == api.user.get_current().id", "async def validate_account(self) -> bool:\n raise NotImplementedError", "def _evaluate_has_auths(self, context, user, partner):\n # User fulfills initial authorization\n fulfills_auth = True\n # Checking if user has agreed to terms and conditions, otherwise\n # they shouldn't be authorized to access the collection\n user_agreed_terms = user.userprofile.terms_of_use\n\n if partner.authorization_method in [Partner.EMAIL, Partner.CODES, Partner.LINK]:\n partner_renew = True\n final_auth = fulfills_auth and user_agreed_terms and partner_renew\n else:\n final_auth = fulfills_auth and user_agreed_terms\n # User has authorizations, link to collection page\n context[\"has_auths\"] = final_auth\n\n return context", "def answer_available(self):\r\n if self.showanswer == '':\r\n return False\r\n elif self.showanswer == \"never\":\r\n return False\r\n elif self.runtime.user_is_staff:\r\n # This is after the 'never' check because admins can see the answer\r\n # unless the problem explicitly prevents it\r\n return True\r\n elif self.showanswer == 'attempted':\r\n return self.attempts > 0\r\n elif self.showanswer == 'answered':\r\n # NOTE: this is slightly different from 'attempted' -- resetting the problems\r\n # makes lcp.done False, but leaves attempts unchanged.\r\n return self.lcp.done\r\n elif self.showanswer == 'closed':\r\n return self.closed()\r\n elif self.showanswer == 'finished':\r\n return self.closed() or self.is_correct()\r\n\r\n elif self.showanswer == 'past_due':\r\n return self.is_past_due()\r\n elif self.showanswer == 'always':\r\n return True\r\n\r\n return False", "def test_form_when_a_user_has_a_request_awaiting_authorisation(self):\n self.approve_project(self.project)\n\n # Create a project user membership.\n ProjectUserMembership.objects.create(\n project=self.project,\n user=self.project_applicant,\n status=ProjectUserMembership.AWAITING_AUTHORISATION,\n date_joined=datetime.datetime.now(),\n 
date_left=datetime.datetime.now() + datetime.timedelta(days=10),\n )\n\n # Ensure the project user membership status is currently set to awaiting authorisation.\n membership = ProjectUserMembership.objects.get(\n user=self.project_applicant,\n project=self.project,\n )\n self.assertTrue(membership.is_awaiting_authorisation())\n\n # A request to create a project user membership should be rejected.\n form = ProjectUserMembershipCreationForm(\n initial={\n 'user': self.project_applicant,\n },\n data={\n 'project_code': self.project_code,\n },\n )\n self.assertFalse(form.is_valid())\n self.assertEqual(\n form.errors['project_code'],\n ['A membership request for this project already exists.'],\n )", "def not_complete(request):\n print(\"not_complete method in tutor_helper.py\")\n if user_auth(request):\n user = User.objects.get(email=request.user.email)\n print(\"\\t\", user)\n current_user = UserInformation.objects.get(user=user)\n if current_user.current_main_set is None:\n return False\n if current_user.completed_sets is not None:\n if current_user.current_main_set not in current_user.completed_sets.all():\n print(\"not complete\")\n print(current_user.current_main_set)\n return True\n else:\n if current_user.completed_sets is None:\n return True\n return False", "def check_account_status(request):\n\n user = request.user\n\n if not user.is_authenticated():\n return {\n 'current_user': user,\n 'check_account_status_url': reverse('check_account_status'),\n }\n\n session = request.session\n\n flag = session.get('show_email_confirmation_dialog', True)\n show = not user.has_activated_account and flag\n session['show_email_confirmation_dialog'] = False\n\n # We don't want so show email confirmation when use is trying to buy a ticket.\n if 'payment-details' in request.path:\n show = False\n\n return {\n 'current_user': user,\n 'show_email_confirmation_dialog': False,\n 'check_account_status_url': reverse('check_account_status'),\n }", "def can_accept(self, user):\n if user.has_perm('funding.make_application_decisions'):\n # Funding manager can override / update decisions, if required\n # But we still need to have had a offer made\n if self.status in ['G', 'A', 'N']:\n return True\n # Applicants can only decide on granted applications\n if self.status == 'G':\n if self.applicant == user:\n return True\n return False", "def test_user_has_valid_or_pending(self):\r\n user = UserFactory.create()\r\n attempt = SoftwareSecurePhotoVerification(user=user)\r\n\r\n # If it's any of these statuses, they don't have anything outstanding\r\n for status in [\"created\", \"ready\", \"denied\"]:\r\n attempt.status = status\r\n attempt.save()\r\n assert_false(SoftwareSecurePhotoVerification.user_has_valid_or_pending(user), status)\r\n\r\n # Any of these, and we are. 
Note the benefit of the doubt we're giving\r\n # -- must_retry, and submitted both count until we hear otherwise\r\n for status in [\"submitted\", \"must_retry\", \"approved\"]:\r\n attempt.status = status\r\n attempt.save()\r\n assert_true(SoftwareSecurePhotoVerification.user_has_valid_or_pending(user), status)", "def test_project_user_membership_awaiting_authorisation_status(self):\n self.membership.status = ProjectUserMembership.AWAITING_AUTHORISATION\n self.assertTrue(self.membership.awaiting_authorisation())\n\n self.membership.status = ProjectUserMembership.AUTHORISED\n self.assertFalse(self.membership.awaiting_authorisation())", "def answer_waiting_call(self) -> None:", "def check_user_and_login(self) -> Response:\n pass", "def check_option(update: Update, context: CallbackContext) -> None:\n if not update.effective_chat or not update.effective_user:\n return\n if not isinstance(context.chat_data, dict):\n raise AssertionError\n if update.effective_user.id not in context.chat_data[\n 'question_attempted_by']:\n chosen = int(update.callback_query.data.split('_')[1])\n que: Question = context.chat_data['qlist'][\n context.chat_data['question_number']]\n if context.chat_data['marksheet'].get(update.effective_user.id,\n None) is None:\n context.chat_data['marksheet'][int(\n update.effective_user.id)] = {\n 'name':\n escape_markdown(update.effective_user.full_name),\n 'score': 0\n }\n if que.is_correct(que.get_options()[chosen]):\n context.chat_data['marksheet'][\n update.effective_user.id]['score'] += 1\n context.bot.answer_callback_query(\n callback_query_id=update.callback_query.id,\n text=\"Correct!\",\n show_alert=True)\n context.chat_data['question_attempted_by'].append(\n update.effective_user.id)\n else:\n context.bot.answer_callback_query(\n callback_query_id=update.callback_query.id,\n text=\"Incorrect!, \" +\n f\"the correct answer is: {que.get_correct()}\",\n show_alert=True)\n context.chat_data['question_attempted_by'].append(\n update.effective_user.id)\n else:\n context.bot.answer_callback_query(\n callback_query_id=update.callback_query.id,\n text=\"You can only attempt once!\",\n show_alert=True)", "def test_check_contributing_state_completed_user_not_contributed(self):\r\n app = AppFactory.create()\r\n task = TaskFactory.create(app=app, n_answers=2)\r\n TaskRunFactory.create_batch(2, task=task)\r\n user = UserFactory.create()\r\n\r\n contributing_state = helpers.check_contributing_state(app_id=app.id,\r\n user_id=user.id)\r\n\r\n assert task.state == 'completed', task.state\r\n assert contributing_state == 'completed', contributing_state", "def waiting_confirmation(self):", "def consent(s, eType, eVal):\n try:\n import maya.cmds as cmds # Is Maya active? Ask using their GUI\n answer = cmds.confirmDialog(t=eType.__name__, m=CONFIRM_MSG, b=(\"Yes\",\"No\"), db=\"Yes\", cb=\"No\", ds=\"No\")\n return \"Yes\" == answer\n except ImportError:\n return True # No means to ask? Ah well ...", "def user_requested_access(user):\r\n user = CourseCreator.objects.get(user=user)\r\n if user.state != CourseCreator.GRANTED:\r\n user.state = CourseCreator.PENDING\r\n user.save()", "async def check_in_game(user_id, ctx): # this is meant for when it is accessed by commands outside of BlackJack.\n check = ex.first_result(await ex.conn.fetchrow(\"SELECT COUNT(*) From blackjack.games WHERE player1 = $1 OR player2 = $1\", user_id))\n if check:\n await ctx.send(f\"> **{ctx.author}, you are already in a pending/active game. 
Please type {await ex.get_server_prefix_by_context(ctx)}endgame.**\")\n return True", "def check_required(self):\n if not self.required_ran:\n self._get_start_date()\n self._get_querysets()\n self._convert_spend_currency()\n self._convert_to_daily_df()\n self._get_budget_spend_series()\n\n self.required_ran = True", "def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False", "def check_active(self, user):\r\n if not self.require_active:\r\n # Ignore & move on.\r\n return True\r\n\r\n return user.is_active", "async def ask_yes_or_no(question: str, threshold: float = 0.75) -> bool:\n return classify_yes_no(str(await ask(question)), threshold=threshold)", "def should_ask_if_examiner_want_to_give_another_chance(self):\n if self.assignment.is_electronic:\n return (self.delivery_status == \"corrected\" and not self.feedback.is_passing_grade) \\\n or self.delivery_status == 'closed-without-feedback'\n else:\n return False", "async def pending(self, ctx):\r\n if ctx.guild.id == 445092370006933505:\r\n data = self.config.guild(ctx.guild)\r\n lst = await data.get_raw('neededlist')\r\n description = \"\"\r\n coach = await data.coachid()\r\n coach_role = ctx.guild.get_role(coach)\r\n x = ctx.author.top_role\r\n if x >= coach_role:\r\n for member in lst:\r\n userobj = ctx.guild.get_member(int(member))\r\n description += (str(userobj.mention) + '\\n')\r\n embed = discord.Embed(color=0xFFFF00, title='Coaching Needed by following people', description=description)\r\n embed.set_footer(text=credit)\r\n await ctx.send(embed=embed)\r\n await ctx.send('Type \"{0}coaching done @<player name>\" if the player has been coached or type \"{0}coaching info <@playername>\" to view the details submitted by the user'.format(ctx.prefix))\r\n \r\n else:\r\n await ctx.send(\"You are not allowed to do that\")\r\n\r\n else:\r\n await ctx.send(\"This command only works in the Legend eSports server, join us at: https://discord.gg/GGuCXDn\")", "def CheckAccount(self):\n \n if self.userName != '':\n res=self.helper.getAccounts(self.userName)\n if res != None:\n if res == []:\n return False\n else:\n return res\n else:\n return None", "def awaiting_payment(self):", "def check_user(self):\n return self.client.service.checkUser(self.authentication).accountDetails", "def checkbalance(self):\n logging.debug('Checked user balance')", "def is_on_waiting_list(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.event.waiting_list:\n return True\n return False", "def is_eligible(self, myself):\n if self.author().screen_name == myself.screen_name:\n log_.debug(\"Not replying to my own tweets\")\n return False\n if self.is_retweet():\n log_.debug(\"Not processing pure retweets\")\n return False\n return True", "def decision(self) -> bool:\n\n while True:\n # Get's user input, makes all charactures lowercase, and removes any whitespace\n decision = input('Enter \"hit\" or \"stay\". \\n').lower().strip()\n\n if decision == 'hit' or decision == 'stay':\n return decision == 'hit'\n else:\n # Humans can be dumb. 
Doesn't break the while loop\n print('\\nYou must type \"hit\" or \"stay\".')", "def is_pending(self):\n status = self.get_status()\n return status[\"status\"] == 3", "def test_yes_option_enabled(\n self, wait_tx_settled_mock, confirm_mock, do_transfer_mock\n ):\n password_option = self.get_password_args(self.PASSWORD)\n self.invoke(\n \"transfer\",\n self.LEDGER_ID,\n self.get_address(self.LEDGER_ID, self.PASSWORD),\n \"100000\",\n \"100\",\n \"-y\",\n *password_option,\n )\n confirm_mock.assert_not_called()", "def check_allow_reset(self):\r\n if not self.ready_to_reset:\r\n if self.current_task_number > 0:\r\n last_response_data = self.get_last_response(self.current_task_number - 1)\r\n current_response_data = self.get_current_attributes(self.current_task_number)\r\n\r\n if (current_response_data['min_score_to_attempt'] > last_response_data['score']\r\n or current_response_data['max_score_to_attempt'] < last_response_data['score']):\r\n self.state = self.DONE\r\n self.ready_to_reset = True\r\n\r\n return self.ready_to_reset", "def checkGoal(self):\n # -- It is not included for simplifity --#\n if self.reward_cumulative != None:\n x = round((abs(self.reward_cumulative) - abs(round(self.reward_cumulative))) * 100);\n rem_goal = x % 25\n rem_timeout = x % 20\n if rem_goal == 0 and x != 0:\n self.is_goal = True\n else:\n self.is_goal = False\n\n if rem_timeout == 0 and x != 0:\n self.is_timeout = True\n else:\n self.is_timeout = False", "def get_is_responded(self, obj: Vacancy) -> bool:\n request = self.context.get(\"request\")\n if not request or not request.user or request.user.is_anonymous:\n return False\n return obj.responded_users.filter(pk=request.user.pk).exists()", "def check_if_user_has_finished():\n ok_to_finish = True\n user_input_accepted = False\n while not user_input_accepted:\n user_input = input(\"Do you want to finish (y/n): \").lower()\n if user_input == 'y':\n user_input_accepted = True\n elif user_input == 'n':\n ok_to_finish = False\n user_input_accepted = True\n else:\n print('Response must be (y/n), please try again')\n return ok_to_finish", "async def coach(self, ctx):\r\n if ctx.guild.id == 445092370006933505:\r\n user = ctx.author\r\n dm_channel = user.dm_channel\r\n guild_data = self.config.guild(ctx.guild)\r\n coach_id = await guild_data.coachid()\r\n coach = ctx.guild.get_role(int(coach_id))\r\n channel_id = await self.config.guild(ctx.guild).coachchannel()\r\n channel = ctx.guild.get_channel(int(channel_id))\r\n if dm_channel is None:\r\n dm_channel = await user.create_dm()\r\n lst = await guild_data.get_raw(\"neededlist\")\r\n player_data = self.config.member(ctx.author)\r\n\r\n def check(m):\r\n return m.channel == dm_channel and m.author == user\r\n\r\n try:\r\n if user.id in lst:\r\n await ctx.send(\"You already have a coaching request pending please stay patient or contact our staff if its been over 48 hrs since your coaching request\")\r\n else:\r\n await ctx.send(\"Please check your DM's...\")\r\n await user.send(\"Please tell us your In game name?, Type 'stop' to stop the process\")\r\n ign = await self.bot.wait_for('message', timeout=60, check=check)\r\n ign_use = ign.content\r\n new_ign = ign.content.lower()\r\n if new_ign == \"stop\":\r\n raise UserEnd\r\n await user.send(\"Please tell us your Player Tag?, Type 'stop' to stop the process\")\r\n tag = await self.bot.wait_for('message', timeout=60, check=check)\r\n tag_use = tag.content\r\n new_tag = tag.content.lower()\r\n if new_tag == \"stop\":\r\n raise UserEnd\r\n await user.send(\"What time do 
you prefer for coaching? (Times in UTC only), Type 'stop' to stop the process\")\r\n time = await self.bot.wait_for('message', timeout=60, check=check)\r\n time_use = time.content\r\n np = time.content.lower()\r\n if np == \"stop\":\r\n raise UserEnd\r\n await user.send(\"What archatypes do you prefer to play?\")\r\n deck = await self.bot.wait_for('message', timeout=60, check=check)\r\n new_deck = deck.content.lower() # I know I could have made a function to check this but my brain is not working\r\n deck_use = deck.content\r\n if new_deck == \"stop\":\r\n raise UserEnd\r\n\r\n await user.send(\"You will be contacted by one of our coaches please stay patient.\")\r\n await channel.send(\"{} New coaching request from {}\".format(coach.mention, user.mention))\r\n await self.emb(ctx, \"Discord Name\", \"In Game Name\", \"Player Tag\", \"Preferred Time\", \"Deck Type\", user.mention, ign_use, tag_use, time_use, deck_use)\r\n lst.append(user.id)\r\n await self.config.guild(ctx.guild).neededlist.set(lst)\r\n await player_data.ign.set(ign_use)\r\n await player_data.tag.set(tag_use)\r\n await player_data.time.set(time_use)\r\n await player_data.deck_type.set(deck_use)\r\n\r\n except asyncio.exceptions.TimeoutError:\r\n await user.send(\"Timeout...\") # not sure where to send these messages\r\n return\r\n except UserEnd:\r\n await user.send(\"Stopped!\") # not sure where to send these messages\r\n return\r\n else:\r\n await ctx.send(\"This command only works in the Legend eSports server, join us at: https://discord.gg/GGuCXDn\")", "def is_waiting_to_be_assigned(self):\n if self.status == \"WAITING_TO_BE_ASSIGNED\":\n return True\n else:\n return False", "def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()", "def verify_player_pending(self, player_email):\n try:\n self.pending_players.index(player_email)\n return True\n except ValueError:\n return False", "def is_aprentice(self):\n return self.user_profile_status == self.APPRENTICE", "def step_impl(context):\n response = test_app.get(f\"/get_user_account/{user_id}\").json()\n assert response != []\n assert response[0][\"userid\"] == user_id\n assert response[0][\"balance\"] == 100", "def ready(self):\n if self._wait_auth:\n return False\n return True", "def is_user_change_required(self):\n return self.__running_user != self.__desired_user", "def test_func(self):\n member_to_finish = self.get_object()\n return self.request.user.rfid == member_to_finish.rfid", "async def control_checks(self, ctx):\n server_id = ctx.message.server.id\n requester = ctx.message.author\n #silently drop if not in voice\n if not self.in_voice(server_id):\n return False\n #refuse if user not in the same channel\n if not self.user_in_channel(server_id, requester):\n vcname = self.get_server_dict(server_id)['voice'].channel.name\n await ctx.bot.send_message(ctx.message.channel, \"You can't control me outside of {}.\".format(vcname))\n return False\n return True", "def _handle_consent_confirmation(user, is_confirmed):\n if is_confirmed == \"yes\":\n # user has already given consent, continue flow\n response = server.create_authorization_response(grant_user=user)\n else:\n # user did not give consent\n response = server.create_authorization_response(grant_user=None)\n return response", "def is_eligible(self) -> Optional[bool]:\n return pulumi.get(self, \"is_eligible\")", "def _assert_message_is_ask_response(\n self,\n message: W24TechreadMessage,\n ask_type: W24AskType,\n ) -> None:\n self._check_request_id(message)\n 
self._check_message_type(\n message,\n W24TechreadMessageType.ASK,\n ask_type)", "def uptodate_eligible(account_name):\n key = 'fava-uptodate-indication'\n if key in g.ledger.account_metadata(account_name):\n return g.ledger.account_metadata(account_name)[key] == 'True'\n else:\n return False", "def get_need_advisor(self):\n return self.owner.qi_required == QI_CHECK['yes'] and len(self.advisor.all()) == 0", "def check_inflight_already_running(self, user: Identifier) -> bool:\n with self._lock:\n for flow in self.in_flight:\n if flow.requestor == user:\n return True\n return False", "def test_successful_get_an_answer(self):\n self.is_authenticated(self.user1)\n response = self.get_answer()\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def can_approve(self, user, **data):\n raise Return(False)", "def is_profile_complete(self):\n return bool(self.fullname and self.username and self.email)", "def payment_approval(self, house_cost: (int, float)):\n if self.money_available >= house_cost: # Person has enough available money to make a deal with Realtor\n self.money_available -= house_cost\n print(f'Payment from {self.name} was approved')\n return True\n print(f'{self.name} doesn\\'t have enough money to buy this house')\n return False", "def check_balance():\n print(\"\\n\")\n print(messages.check_balance)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.check_balance(credentials)\n start_again() if result else BankOperationsUi.check_balance()", "def set_awaiting_turn(self):\n if self.status == self.PLAYER_BANKRUPT:\n return\n if self.status == self.PLAYER_AWAITING_TURN:\n return\n self.status = self.PLAYER_AWAITING_TURN\n # self.client.send_player_turn_end()", "def check_answer(self, choice):\n return choice == self.answer", "def test_func(self):\n answer = self.get_object()\n return True if self.request.user == answer.author or self.request.user.is_superuser else False", "def check_user(self):\n try:\n if self.get_customer()[0][0] == self.dni:\n return True\n else:\n return False\n except:\n return False", "def completion_status(self):\r\n if self.end_date < date.today():\r\n return True, self.END_DATE_ARRIVED\r\n try:\r\n if self.stats.num_contributions == self.max_contributions:\r\n return True, self.NUM_CONTRIBUTIONS_RECEIVED\r\n if self.target_amount > D('0'): # Not a free campaign\r\n if self.stats.amount_raised >= self.target_amount:\r\n return True, self.TARGET_AMOUNT_RAISED\r\n except Stats.DoesNotExist:\r\n return False, self.NO_CONTRIBUTIONS_YET\r\n return False, self.is_active and self.ACTIVE or self.NOT_ACTIVE", "def _is_goal_achieved(self) -> bool:\n assert self._goal_info_cache\n return self._goal_info_cache[1]", "def has_user(self, user): # pylint: disable=unused-argument\r\n return False", "def decision(question):\n return click.confirm(question, show_default=True)", "async def check_account(self) -> tuple:\n results = await self._api.call('system', 'check_account')\n\n username = results.header.vars.get('un')\n result = results.header.vars.get('signed_out')\n if not result:\n result = results.header.vars.get('signed_in')\n\n return AccountStatus(result), username", "def asking(self):\n return 'Sure.'", "def check(self, roommate_instance):\n if self.status == Item.PROCESSING_CODE and self.check_who == roommate_instance:\n self.status = Item.CHECKED_CODE\n else:\n raise PermissionDenied", "def isDone(self):\n if 
self.current_turn >= self.MAX_TURNS: return True\n if self.last_user_action[\"action\"] == \"END\": return True\n return False", "def check(self, answer):\n return self.answer == answer", "def validate_user_response(self):\n is_response_valid = False\n while is_response_valid is False:\n response = self.ask_user_input(\"Please, enter a valid option or command\")\n if response in self.options.keys():\n is_response_valid = True\n self.current_response = response\n else:\n print(\"Invalid option/command, please try again\")\n return is_response_valid", "def need_admin_approval(self):\n return self._need_admin_approval", "def check_contributing_state(app_id, user_id=None, user_ip=None):\r\n\r\n states = ('completed', 'can_contribute', 'cannot_contribute')\r\n if overall_progress(app_id) >= 100:\r\n return states[0]\r\n if n_available_tasks(app_id, user_id=user_id, user_ip=user_ip) > 0:\r\n return states[1]\r\n return states[2]", "def check_award_status(request, award, now):\n auction = request.validated['auction']\n protocol_overdue = protocol_overdue_predicate(award, 'pending.verification', now)\n contract_overdue = contract_overdue_predicate(award, 'active', now)\n payment_overdue = (award.status == 'pending.payment' and award['paymentPeriod']['endDate'] < now)\n if protocol_overdue or contract_overdue or payment_overdue:\n set_unsuccessful_award(request, auction, award, now)", "def check_if_event_confirmed_user(request):\n\n if request.user.is_anonymous():\n user_activated = False\n else:\n user_activated = request.user.has_activated_account\n\n try:\n return {'is_event_user_not_confirmed': not user_activated}\n except Exception as e:\n print 'Exception!!!'\n return {'is_event_user_not_confirmed': False}", "def custom_assess_status_check(self):\n options = self.options\n # can check options.thing to ensure that it makes sense\n # if wrong return 'blocked', \"The driver is badly configured ...\"\n return None, None", "def completed_by(quiz, user):\n\n return quiz.is_completed_by(user)", "def is_first_challenge_completed(self):\n participants = Participant.objects.filter(user_id=self.user.id)\n\n total_completions = 0\n\n total_completions += Entry.objects.filter(participant__in=participants).count()\n #total_completions += ParticipantPicture.objects.filter(participant__in=participants).count()\n #total_completions += ParticipantFreeText.objects.filter(participant__in=participants).count()\n\n if total_completions == 0:\n return False\n\n return True", "def test_func(self):\n return self.request.user.is_active # any active user", "def is_call_waiting(self) -> bool:", "def req_qry_trading_account(self):\n pass", "def _do_request(self):\n\n if time.time() < self._next_request:\n return False\n else:\n return True", "def is_pending_activation(self):\n if (self.auth_token_is_used and self.is_active):\n return False\n else:\n return True", "def is_pending(self):\n if self.status == \"PENDING\":\n return True\n else:\n return False", "def test_ask_yes_no_3(self, input_mock):\n response = basic.ask_yes_no(response_attempt=3)\n self.assertIsNone(response)", "def positive_balance_check(user):\n return has_positive_balance(user)", "def check_if_ask_question_page_is_presented(self):\n return self.if_element_displayed(by_locator=self.__ASK_QUESTION_PAGE)", "def verifysubscriptionstatusinaccounttab():\n pass", "def __enter__(self):\r\n\r\n # if the user account is not activated then no go\r\n if not self.user_acct.activated:\r\n raise HTTPForbidden('Deactivated Account')\r\n\r\n if 
AuthHelper.check_login(self.request, username=self.username):\r\n return True\r\n\r\n if AuthHelper.check_api(self.api_key, self.user_acct.api_key):\r\n return True\r\n\r\n raise HTTPForbidden('Invalid Authorization')", "def test_yes_option_disabled(\n self, wait_tx_settled_mock, confirm_mock, do_transfer_mock\n ):\n password_option = self.get_password_args(self.PASSWORD)\n self.invoke(\n \"transfer\",\n self.LEDGER_ID,\n self.get_address(self.LEDGER_ID, self.PASSWORD),\n \"100000\",\n \"100\",\n *password_option,\n )\n confirm_mock.assert_called_once()", "def check_reply(user):\n if not user.is_authenticated():\n return 'not_auth'\n\n return 'ok'", "def is_invited_pending_activation(self):\n if self.registration_method == self.INVITED \\\n and self.is_pending_activation():\n return True\n else:\n return False", "def checkGoalReached(self):\n if self._after_dead_line():\n if not self._crowdsale_closed.get():\n self._crowdsale_closed.set(True)\n self.CrowdsaleEnded()\n\n if self._amount_raised.get() >= self._funding_goal.get():\n self._funding_goal_reached.set(True)\n self.GoalReached(self._addr_beneficiary.get(), self._amount_raised.get())\n Logger.debug(f'Goal reached!', TAG)", "def chk_account(self, account_num, action):\n try:\n if account_num not in self.accounts:\n raise NoAccount\n except NoAccount:\n print(\"No account id \"+account_num+\" That exists in this bank\")\n else:\n if action == 'return':\n return self.accounts[account_num]\n elif action == 'del':\n del self.accounts[account_num]", "def checkin(self):\n folio = self.folio_id\n if folio.payment_deposits <= 0:\n raise UserError(_(\"\"\"No record of security deposit found on folio {}\n \"\"\".format(folio.name)))\n if folio.state != 'on_queue':\n raise UserError(_(\n 'Folio {} is not yet to be processed'.format(self.folio_id.name)))\n hours, minutes = decimal_to_time(self.env.user.company_id.checkin_hour)\n can_check_in = datetime.combine(\n date.today(), tm(hours, minutes)) < datetime.now()\n if not can_check_in:\n raise UserError(\n 'Guest(s) cannot be checked in earlier than {}'.format(\n self.env.user.company_id.checkin_hour))\n if self.folio_id.room_id.occupy():\n self.folio_id.write({'state': 'checkin'})" ]
[ "0.63010466", "0.63010466", "0.6016238", "0.6006359", "0.600314", "0.5988364", "0.59785503", "0.59660965", "0.59252214", "0.592486", "0.59096104", "0.58924055", "0.58714646", "0.584293", "0.58214206", "0.5796933", "0.5795883", "0.5789712", "0.57822067", "0.5771979", "0.5765634", "0.57573617", "0.57171303", "0.5701908", "0.56926304", "0.56921995", "0.5678412", "0.5670671", "0.56697947", "0.5666829", "0.56658596", "0.56658345", "0.5656847", "0.56520236", "0.5646601", "0.56446135", "0.5637284", "0.56328535", "0.56303346", "0.5629006", "0.5619535", "0.55957776", "0.55895776", "0.55890167", "0.55851716", "0.5576572", "0.5569553", "0.5562075", "0.55576646", "0.5557125", "0.5551002", "0.5548806", "0.5544259", "0.5535463", "0.5526164", "0.55192834", "0.55181396", "0.55171865", "0.5514705", "0.55146337", "0.55140126", "0.55015415", "0.5498412", "0.549618", "0.54883397", "0.5487701", "0.54711574", "0.5463127", "0.5462092", "0.54576594", "0.5457053", "0.545655", "0.54533124", "0.5450361", "0.54469097", "0.5445455", "0.54325044", "0.54247826", "0.54230523", "0.54170054", "0.5414794", "0.5412026", "0.5408333", "0.53932786", "0.53902185", "0.53839695", "0.53788775", "0.5376459", "0.5375377", "0.53684443", "0.5366358", "0.5359942", "0.5353507", "0.535133", "0.53472155", "0.53401375", "0.53328073", "0.53286153", "0.53268063", "0.5324645" ]
0.6145321
2
Invite an inactive user (who needs to activate their account). Returns None if the user already exists.
def invite_new_user(self, email, full_name):
    User = get_user_model()
    if self.is_moderator and self.has_perm('accounts.invite_user'):
        try:
            User.objects.get(email=email)
        except User.DoesNotExist:
            new_user = create_inactive_user(email, full_name)
            new_user.registration_method = new_user.INVITED
            new_user.moderator = self
            new_user.moderator_decision = new_user.PRE_APPROVED
            new_user.decision_datetime = timezone.now()
            new_user.auth_token = generate_unique_id()
            new_user.save()
            return new_user
        else:
            return None
    else:
        raise PermissionDenied
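A brief, hedged sketch of how a caller might drive this method; everything except `invite_new_user` and the `PermissionDenied` it raises is an assumption, including the import path for the exception:

from django.core.exceptions import PermissionDenied  # assumed to match the exception raised above

# Hypothetical call site: 'moderator' is some user instance exposing invite_new_user().
try:
    invited = moderator.invite_new_user('new.member@example.com', 'New Member')
except PermissionDenied:
    invited = None  # caller is not a moderator or lacks accounts.invite_user
if invited is None:
    print('No invitation sent: user exists or permission was denied.')

The method's three outcomes (new pre-approved user, None for an existing email, exception for a non-moderator) map cleanly onto this try/except plus None check.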
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def invite_user(request):\r\n params = request.params\r\n\r\n email = params.get('email', None)\r\n user = request.user\r\n\r\n if not email:\r\n # try to get it from the json body\r\n email = request.json_body.get('email', None)\r\n\r\n if not email:\r\n # if still no email, I give up!\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'username': user.username,\r\n 'error': \"Please submit an email address\"\r\n })\r\n\r\n email = email.lower()\r\n # first see if the user is already in the system\r\n exists = UserMgr.get(email=email.lower())\r\n if exists:\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'username': exists.username,\r\n 'error': \"This user is already a Bookie user!\"\r\n })\r\n\r\n new_user = user.invite(email.lower())\r\n if new_user:\r\n LOG.debug(new_user.username)\r\n # then this user is able to invite someone\r\n # log it\r\n AuthLog.reactivate(new_user.username)\r\n\r\n # and then send an email notification\r\n # @todo the email side of things\r\n settings = request.registry.settings\r\n msg = ActivationMsg(new_user.email,\r\n \"Enable your Bookie account\",\r\n settings)\r\n\r\n msg.send(\r\n request.route_url(\r\n 'reset',\r\n username=new_user.username,\r\n reset_key=new_user.activation.code))\r\n return _api_response(request, {\r\n 'message': 'You have invited: ' + new_user.email\r\n })\r\n else:\r\n # you have no invites\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'username': user.username,\r\n 'error': \"You have no invites left at this time.\"\r\n })", "def inactive_to_active(sender, instance, **kwargs):\n\n if instance.is_active and DiscoUser.objects.filter(id=instance.id, is_active=False).exists():\n from discogportal.discoutils.asyncutils import create_platform_invite_instance\n from discomail.models import PlatformInvite\n if not PlatformInvite.objects.filter(user=instance).exists():\n created = True\n create_platform_invite_instance(instance, created)\n else:\n pass\n else:\n pass", "def create_inactive_user(self, form):\n new_user = form.save(commit=False)\n new_user.is_active = False\n new_user.save()\n\n self.send_activation_email(new_user)\n\n return new_user", "def create_inactive_user(self, form):\n\t\tnew_user = form.save(commit=False)\n\t\tnew_user.is_active = False\n\t\tnew_user.save()\n\n\t\tself.send_activation_email(new_user)\n\n\t\treturn new_user", "def test_activation_already_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n profile = self.registration_profile.objects.get(user=new_user)\n _, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(activated)", "def invite_user():\n\n form = InviteUserForm()\n if form.validate_on_submit():\n invited_by = db.session.query(User).filter_by(id=current_user.id).first()\n user = User(\n invited_by=invited_by.full_name,\n first_name=form.first_name.data,\n last_name=form.last_name.data,\n email=form.email.data)\n db.session.add(user)\n db.session.commit()\n token = user.generate_confirmation_token()\n invite_link = url_for(\n 'account.join_from_invite',\n user_id=user.id,\n token=token,\n _external=True)\n\n get_queue().enqueue(\n send_email,\n recipient=user.email,\n 
subject='You Are Invited To Join',\n template='account/email/invite',\n user=user.id,\n invited_by=invited_by,\n invite_link=invite_link,\n invite_by=invited_by\n )\n flash('User {} successfully invited'.format(user.full_name),\n 'form-success')\n return redirect(url_for('invite.index'))\n return render_template('invite/new_user.html', form=form)", "def test_activation_already_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertEqual(user, new_user)\n self.assertFalse(activated)", "def activated_user(self):\n user = self.signup_user_two()\n user.is_active = True\n user.save()\n return user", "def activate(self, *args, **kwargs):\n username = self.validate_key(kwargs.get(\"activation_key\"))\n user = self.get_user(username)\n user.is_active = True\n user.save()\n return user", "def activate_user(self, activation_key):\n if SHA1_RE.search(activation_key):\n try:\n profile = RegistrationProfile.objects.get(activation_key=activation_key)\n except self.model.DoesNotExist:\n return False\n if not profile.activation_key_expired():\n user = profile.user\n user.is_active = True\n user.save()\n profile.activation_key = \"ALREADY_ACTIVATED\"\n profile.save()\n return user\n\n return False", "def activate_user(self, activation_key):\r\n # Make sure the key we're trying conforms to the pattern of a\r\n # SHA1 hash; if it doesn't, no point trying to look it up in\r\n # the database.\r\n if SHA1_RE.search(activation_key):\r\n try:\r\n profile = self.get(activation_key=activation_key)\r\n except self.model.DoesNotExist:\r\n return False\r\n if not profile.activation_key_expired():\r\n user = profile.user\r\n user.is_active = True\r\n user.save()\r\n profile.activation_key = \"ALREADY_ACTIVATED\"\r\n profile.save()\r\n return user\r\n return False", "def invite_user(request):\n moderator = request.user\n site = get_current_site(request)\n\n invitation_form = InviteMemberForm(request.POST)\n\n if invitation_form.is_valid():\n\n # Invite user\n full_name = invitation_form.cleaned_data['full_name']\n email = invitation_form.cleaned_data['email']\n new_user = moderator.invite_new_user(email, full_name)\n\n # Log moderation event\n msg_type = ModerationLogMsg.INVITATION\n log_comment = _('{} invited {}'.format(moderator.get_full_name(),\n new_user.get_full_name()))\n log_moderator_event(msg_type=msg_type,\n user=new_user,\n moderator=moderator,\n comment=log_comment)\n\n # Send email\n subject = _('Welcome to {}'.format(site.name))\n template = 'moderation/emails/invite_new_user.html'\n token = new_user.auth_token\n url = request.build_absolute_uri(\n reverse('accounts:activate-account', args=[token]))\n send_connect_email(subject=subject,\n template=template,\n recipient=new_user,\n sender=moderator,\n site=site,\n url=url)\n\n messages.success(request, _('{} has been invited to {}.'.format(\n new_user.get_full_name(), site.name)))\n\n return redirect('moderation:moderators')\n\n else:\n return moderation_home(request, invitation_form=invitation_form)", "def create_inactive_user(self, username, email, password, first_name=None, last_name=None):\n\n new_user = 
User.objects.create_user(username, email, password)\n new_user.is_active = False\n new_user.first_name = first_name\n new_user.last_name = last_name\n new_user.save()\n\n registration_profile = self.create_registration_profile(new_user)\n registration_profile.send_activation_email()\n\n if not registration_profile:\n return None\n\n return new_user", "def test_resend_activation_email_activated_user(self):\n user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n\n profile = self.registration_profile.objects.get(user=user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertTrue(user.is_active)\n self.assertTrue(activated)\n\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def testInviteCreatesUser(self):\r\n me = User()\r\n me.username = u'me'\r\n me.email = u'me.com'\r\n me.invite_ct = 2\r\n you = me.invite(u'you.com')\r\n\r\n self.assertEqual(\r\n 'you.com',\r\n you.username,\r\n 'The email should be the username')\r\n self.assertEqual(\r\n 'you.com',\r\n you.email,\r\n 'The email should be the email')\r\n self.assertTrue(\r\n len(you.api_key),\r\n 'The api key should be generated for the user')\r\n self.assertFalse(\r\n you.activated,\r\n 'The new user should not be activated')\r\n self.assertEqual(\r\n 1,\r\n me.invite_ct,\r\n 'My invite count should be deprecated')", "def invite(self,roomName,user):\n\n self.sendCommand(roomName +\" /invite\",user)", "def send_user_invitation(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"send_user_invitation\")", "def test_expired_activation(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n self.assertIs(user, False)\n self.assertFalse(activated)\n\n new_user = UserModel().objects.get(username='alice')\n self.assertFalse(new_user.is_active)\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertFalse(profile.activated)", "def activate_user(self, user):\n if not user.active:\n user.active = True\n return True\n return False", "def send_user_invitation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"send_user_invitation\")", "def send_user_invitation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"send_user_invitation\")", "def activate_account(self, activation_key):\n try:\n registration_profile = self.get(activation_key=activation_key)\n except self.model.DoesNotExist:\n return None\n\n if not registration_profile.is_expired():\n user = registration_profile.user\n user.is_active = True\n user.save()\n registration_profile.delete()\n return user\n else:\n return None", "def activate_user(self, email):\r\n activation_key = Registration.objects.get(user__email=email).activation_key\r\n # and now we try to activate\r\n check_for_get_code(self, 200, reverse('activate', kwargs={'key': activation_key}))\r\n # Now make sure that the user is now actually activated\r\n 
self.assertTrue(User.objects.get(email=email).is_active)", "def _activate_user(self, email):\r\n activation_key = registration(email).activation_key\r\n\r\n # and now we try to activate\r\n resp = self.client.get(reverse('activate', kwargs={'key': activation_key}))\r\n return resp", "def activate_user(self, user):\n if not user.active:\n user.active = True\n # noinspection PyUnresolvedReferences\n self.save(user)\n return True\n\n return", "def create_inactive_user(self, username, password, email, send_email=True, profile_callback=None):\n # Create the user.\n new_user = User.objects.create_user(username, email, password)\n new_user.is_active = False\n new_user.save()\n \n # And finally create the registration profile.\n registration_profile = self.create_profile(new_user)\n \n # Create site-specific profile, if specified.\n if profile_callback is not None:\n profile_callback(user=new_user)\n \n if send_email:\n from django.core.mail import send_mail\n current_domain = Site.objects.get_current().domain\n subject = \"Activate your new account at %s\" % current_domain\n message_template = loader.get_template('registration/activation_email.txt')\n message_context = Context({ 'site_url': 'http://%s/' % current_domain,\n 'activation_key': registration_profile.activation_key,\n 'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS })\n message = message_template.render(message_context)\n send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [new_user.email])\n return new_user", "def activate_user(self, activation_key):\n # Make sure the key we're trying conforms to the pattern of a\n # SHA1 hash; if it doesn't, no point even trying to look it up\n # in the DB.\n if SHA1_RE.search(activation_key):\n try:\n user_profile = self.get(activation_key=activation_key)\n except self.model.DoesNotExist:\n return False\n if not user_profile.activation_key_expired():\n # Account exists and has a non-expired key. 
Activate it.\n user = user_profile.user\n user.is_active = True\n user.save()\n return user\n return False", "def _send_existing_agent_user_invite(self):\n standard_invite = self.instance\n try:\n agent_invite = AgentUserInvite.objects.get(\n agent=self._agent_user, organisation=standard_invite.organisation\n )\n except AgentUserInvite.DoesNotExist:\n agent_invite = AgentUserInvite(\n agent=self._agent_user, organisation=standard_invite.organisation\n )\n\n agent_invite.inviter = standard_invite.inviter\n agent_invite.status = AgentUserInvite.PENDING\n agent_invite.save()\n agent_invite.send_confirmation()\n return standard_invite", "def test_user_invite_cant_edit_users_existing_user(self):\n project = fake_clients.FakeProject(name=\"test_project\")\n\n user = fake_clients.FakeUser(name=\"[email protected]\")\n\n setup_identity_cache(projects=[project], users=[user])\n\n url = \"/v1/actions/InviteUser\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": project.id,\n \"roles\": \"project_admin,member,project_mod\",\n \"username\": \"user\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n data = {\n \"username\": \"new_user\",\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": project.id,\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n self.assertEqual(response.json(), {\"notes\": [\"task created\"]})", "def create_inactive_user(self, username, password, email,\r\n send_email=True, profile_callback=None):\r\n new_user = User.objects.create_user(username, email, password)\r\n new_user.is_active = False\r\n new_user.save()\r\n \r\n registration_profile = self.create_profile(new_user)\r\n \r\n if profile_callback is not None:\r\n profile_callback(user=new_user)\r\n \r\n if send_email:\r\n from django.core.mail import send_mail\r\n current_site = Site.objects.get_current()\r\n \r\n subject = render_to_string('registration/activation_email_subject.txt',\r\n { 'site': current_site })\r\n # Email subject *must not* contain newlines\r\n subject = ''.join(subject.splitlines())\r\n \r\n message = render_to_string('registration/activation_email.txt',\r\n { 'activation_key': registration_profile.activation_key,\r\n 'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,\r\n 'site': current_site })\r\n \r\n send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [new_user.email])\r\n return new_user", "def test_activation_deactivated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n # Deactivate the new user.\n new_user.is_active = False\n new_user.save()\n\n # Try to activate again and ensure False is returned.\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(activated)", "def acceptInvite(self, user):\n invite = user if isinstance(user, MyPlexInvite) else self.pendingInvite(user, includeSent=False)\n params = {\n 'friend': int(invite.friend),\n 'home': int(invite.home),\n 'server': int(invite.server)\n }\n url = MyPlexInvite.REQUESTS + f'/{invite.id}' + utils.joinArgs(params)\n return self.query(url, self._session.put)", "def reinvite_user(self, user, email):\n if self.is_moderator and 
self.has_perm('accounts.invite_user'):\n # Reset email, set a new token and update decision datetime\n user.email = email\n user.auth_token = generate_unique_id()\n user.decision_datetime = timezone.now()\n user.save()\n\n return user\n\n else:\n raise PermissionDenied", "def activate_user(cls, activation_key):\n #from registration.signals import user_activated\n \n # Make sure the key we're trying conforms to the pattern of a\n # SHA1 hash; if it doesn't, no point trying to look it up in\n # the database.\n db = DB_Session()\n if SHA1_RE.search(activation_key):\n query = db.query(RegistrationProfile)\n profile = query.filter(RegistrationProfile.activation_key == activation_key).one()\n if not profile:\n return False\n if not profile.activation_key_expired():\n user = profile.user\n user.is_active = 1\n profile.activation_key = RegistrationProfile.ACTIVATED\n db.flush()\n db.commit()\n db.close()\n #user_activated.send(sender=self.model, user=user)\n return user\n return False", "def test_resend_activation_email_activated_user(self):\n user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n\n profile = self.registration_profile.objects.get(user=user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.assertFalse(user.is_active)\n self.assertTrue(activated)\n\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n # Outbox has one mail, admin approve mail\n\n self.assertEqual(len(mail.outbox), 1)\n admins_emails = [value[1] for value in settings.REGISTRATION_ADMINS]\n for email in mail.outbox[0].to:\n self.assertIn(email, admins_emails)", "def invite_user(session, invitee):\n session.invite_event.clear()\n key = b64encode(messaging.common.pkc_encrypt(\n session.get_channel_key(), session.get_encryption_cert(invitee))).decode()\n msg = {\n kk.typ: kk.add_user,\n kk.inviter: session.user,\n kk.invitee: invitee,\n kk.chid: session.chan,\n kk.chkey: key,\n }\n msg[kk.signature] = b64encode(\n messaging.common.create_msg_sig(session, msg)).decode()\n messaging.common.send_msg(session.sock, msg, key=session.symkey)", "def activate_user(self, activation_key, request=None):\n # Make sure the key we're trying conforms to the pattern of a\n # SHA1 hash; if it doesn't, no point trying to look it up in\n # the database.\n if SHA1_RE.search(activation_key):\n try:\n profile = self.get(activation_key=activation_key)\n except self.model.DoesNotExist:\n profile = None\n statsd.incr('user.activate-error.does-not-exist')\n reason = 'key not found'\n if profile:\n if not profile.activation_key_expired():\n user = profile.user\n user.is_active = True\n user.save()\n\n # We don't need the RegistrationProfile anymore, delete it.\n profile.delete()\n\n # If user registered as contributor, send them the\n # welcome email.\n if user.groups.filter(name=CONTRIBUTOR_GROUP):\n self._send_email(\n confirmation_profile=profile,\n url=None,\n subject=_('Welcome to SUMO!'),\n text_template='users/email/contributor.ltxt',\n html_template='users/email/contributor.html',\n send_to=user.email,\n contributor=user)\n\n return user\n else:\n statsd.incr('user.activate-error.expired')\n reason = 'key expired'\n else:\n statsd.incr('user.activate-error.invalid-key')\n reason = 'invalid key'\n\n log.warning(u'User activation failure ({r}): {k}'.format(\n r=reason, 
k=activation_key))\n\n return False", "def activate_user(self, activation_key):\n # Make sure the key we're trying conforms to the pattern of a\n # SHA1 hash; if it doesn't, no point trying to look it up in\n # the database.\n try:\n profile = self.get(admin_key=activation_key)\n except self.model.DoesNotExist:\n return False, False\n user = profile.user\n activated = False\n if not user.is_active:\n user.is_active = True\n user.save()\n activated = True\n return (activated, user)", "def user_activation(user):\n act_hash = random_password(32)\n user.set_hashword(act_hash)\n user.save()\n base_url = url_for('public.home', _external=True)\n act_url = url_for(\n 'auth.activate',\n userid=user.id,\n userhash=act_hash,\n _external=True)\n if not 'mailman' in current_app.extensions:\n logging.warning('E-mail extension has not been configured')\n return act_hash\n msg = EmailMessage()\n msg.subject = 'Your dribdat account'\n msg.body = \\\n \"Hello %s,\\n\" % user.username \\\n + \"Thanks for signing up at %s\\n\\n\" % base_url \\\n + \"Tap here to activate your account:\\n\\n%s\" % act_url\n msg.to = [user.email]\n logging.info('Sending activation mail to user %d' % user.id)\n logging.debug(act_url)\n msg.send(fail_silently=True)\n return act_hash", "def _checkUserInactive(username,self):\r\n active = False\r\n user = _findUser(username)\r\n \r\n if user is not None:\r\n active = user.getIsActive()\r\n \r\n return active", "def activate_user(self, username):\n args = parser_activate.parse_args()\n isActive = request.json.get('isactive')\n\n query = \"\"\"UPDATE users SET isactive=%s WHERE username=%s\"\"\"\n values = isActive, username\n\n conn = self.db\n cursor = conn.cursor()\n cursor.execute(query, values)\n conn.commit()\n return True", "def create_inactive_session_from_invitation(self, setup_id, user_id):\n session = Session.objects.create(setup_id=Setup.objects.get(id=setup_id), user_id=User.objects.get(id=user_id),\n status=\"inactive\")", "def create_inactive_user(self,request,\n username,password,email,\n send_email=True, profile_callback=None, **kwargs):\n #如果存在用户的话不必进行新建只需对权限表进行操作即可,否则新建用户\n if User.objects.filter(email=email).count() == 0:\n new_user = User.objects.create_user(username, email, password)\n new_user.is_active = False\n new_user.save()\n registration_profile = self.create_profile(new_user)\n registration_profile.save()\n current_site = Site.objects.get_current()\n site_domain=current_site.domain\n if send_email:\n from django.core.mail import send_mail\n subject = render_to_string('registration/activation_email_subject.txt',\n\t\t\t\t\t\t\t {'site':get_current_site(request),\n\t\t\t\t\t\t\t\t 'username':username,\n\t\t\t\t\t\t\t\t 'password':password})\n subject = ''.join(subject.splitlines())\n message = render_to_string('registration/activation_email.txt',\n\t\t\t\t\t\t {'activation_key':registration_profile.activation_key,\n\t\t\t\t\t\t\t 'expiration_days':settings.ACCOUNT_ACTIVATION_DAYS,\n\t\t\t\t\t\t\t 'site':site_domain,\n\t\t\t\t\t\t\t 'username':username,\n\t\t\t\t\t\t\t 'password':password})\n logger.error(message)\n send_mail(subject,message,settings.DEFAULT_FROM_EMAIL,[new_user.email])\n else:\n\t\t\tnew_user = User.objects.get(email=email)\n\n# 创建普通用户NORMALUSER Profile\n new_normalprofile = NormalProfile(userid = new_user)\n new_normalprofile.save()\n# 对用户权限写入数据库\n new_authority = UserIdentity.objects.get(identity=NORMAL_USER)\n new_authority.auth_groups.add(new_user)\n new_authority.save()\n\n if profile_callback is not None:\n 
profile_callback(user=new_user)\n return new_user", "def test_resend_activation_email_expired_user(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertTrue(profile.activation_key_expired())\n\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def testInitialUserInactivated(self):\r\n u = User()\r\n u.email = gen_random_word(10)\r\n DBSession.add(u)\r\n\r\n self.assertEqual(\r\n False,\r\n u.activated,\r\n 'A new signup should start out deactivated by default')\r\n self.assertTrue(\r\n u.activation.code is not None,\r\n 'A new signup should start out as deactivated')\r\n self.assertEqual(\r\n 'signup',\r\n u.activation.created_by,\r\n 'This is a new signup, so mark is as thus')", "def test_admin_approval_not_activated(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n\n user = self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n self.assertIs(user, False)\n self.assertIs(profile.user.is_active, False)", "def activate(request, uidb64, token):\n try:\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n except (TypeError, ValueError, OverflowError, User.DoesNotExist):\n user = None\n\n if user is not None and account_activation_token.check_token(user, token):\n user.is_active = True\n user.profile.email_confirmed = True\n user.save()\n login(request, user)\n return redirect('home')\n else:\n return render(request, 'registration/activation_invalid.html')", "def test_registered_user_is_inactive(self):\n self.register_bob()\n the_user = User.objects.filter(username='russellszn')\n self.assertFalse(the_user[0].is_active)", "def activate(userid, userhash):\n a_user = User.query.filter_by(id=userid).first_or_404()\n if a_user.check_hashword(userhash):\n a_user.hashword = None\n a_user.active = True\n a_user.save()\n login_user(a_user, remember=True)\n flash(\"Welcome! 
Your user account has been activated.\", 'success')\n return redirect(url_for('auth.user_profile'))\n elif a_user.active:\n flash(\"Your user account is active.\", 'success')\n else:\n flash(\"Activation not found, or has expired.\" \\\n + \"Please try again or ask an organizer.\", 'warning')\n logout_user()\n return redirect(url_for('public.home'))", "def is_invited_pending_activation(self):\n if self.registration_method == self.INVITED \\\n and self.is_pending_activation():\n return True\n else:\n return False", "def activate_user(activation_code, new_password):\n um = logic.UserManager()\n try:\n user = um.lookup_user_by_activation_code(activation_code)\n user.activate()\n user.set_password(new_password)\n except ex.UserNotFoundError:\n blogger.debug(\"no user found with activation code %s\" % activation_code)\n transaction.abort()\n return dict(activated=False)\n else:\n transaction.commit()\n return dict(activated=True)", "def activate(request, uidb64, token):\r\n\ttry:\r\n\t\tuid = force_text(urlsafe_base64_decode(uidb64))\r\n\t\tuser = User.objects.get(pk=uid)\r\n\texcept (TypeError, ValueError, OverflowError, User.DoesNotExist):\r\n\t\tuser = None\r\n\r\n\tif user is not None and account_activation_token.check_token(user, token):\r\n\t\t# User activated and redirected to the homepage\r\n\t\tuser.is_active = True\r\n\t\tuser.profile.email_confirmed = True\r\n\t\tuser.save()\r\n\t\tlogin(request, user, backend='django.contrib.auth.backends.ModelBackend')\r\n\t\tgames = Game.objects.all()\r\n\t\treturn redirect('/', {'games': games, 'MEDIA_URL': settings.MEDIA_URL})\r\n\telse:\r\n\t\treturn render(request, 'account_activation_invalid.html')", "def invite(self):\n pass", "def active_user(request, uidb36=None, token=None,\r\n template_name='register/activation_confirm.html',\r\n token_generator=default_token_generator,\r\n current_app=None, extra_context=None):\r\n assert uidb36 is not None and token is not None # checked by URLconf\r\n \r\n try:\r\n uid_int = base36_to_int(uidb36)\r\n user = User.objects.get(id=uid_int)\r\n except (ValueError, User.DoesNotExist):\r\n user = None\r\n\r\n if user is not None and token_generator.check_token(user, token):\r\n validlink = True\r\n user.is_active = True\r\n user.save()\r\n \r\n #初始化userprofile\r\n profile_count = UserProfile.objects.filter(user=user).count()\r\n if profile_count == 0:\r\n profile = UserProfile()\r\n profile.user = user\r\n profile.song_ord_filed = 'post_datetime'\r\n profile.save()\r\n else:\r\n validlink = False\r\n context = {\r\n 'validlink': validlink,\r\n }\r\n context.update(extra_context or {})\r\n return render_to_response(template_name, context,\r\n context_instance=RequestContext(request, current_app=current_app))", "def activate(request, uidb64, token):\n try:\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n except(TypeError, ValueError, OverflowError, User.DoesNotExist):\n user = None\n if user is not None and account_activation_token.check_token(user, token):\n user.is_active = True\n user.save()\n return render(request, 'accounts/active_done.html')\n else:\n return HttpResponse('Activation link is invalid!')", "def test_valid_activation(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n 
self.assertIsInstance(user, UserModel())\n self.assertEqual(user.id, new_user.id)\n self.assertFalse(user.is_active)\n self.assertTrue(activated)\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertTrue(profile.activated)", "def test_active_account_activation_key_expired(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n profile.refresh_from_db()\n self.assertTrue(profile.activation_key_expired())", "def add_user_with_status_unrequested(user):\r\n _add_user(user, CourseCreator.UNREQUESTED)", "def test_active_account_activation_key_expired(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n profile.refresh_from_db()\n self.assertTrue(profile.activation_key_expired())", "def activate(self):\r\n if self.activation_code == '':\r\n raise ValidationError('The member is already activated')\r\n signer = TimestampSigner()\r\n signer.unsign(self.activation_code, max_age=timedelta(days=2))\r\n self.hidden = False\r\n self.activation_code = ''\r\n self.joined_date = timezone.now()\r\n self.save()", "def create_inactive_user(self, username, password, email,\n locale=settings.LANGUAGE_CODE,\n text_template=None, html_template=None,\n subject=None, email_data=None,\n volunteer_interest=False, **kwargs):\n new_user = User.objects.create_user(username, email, password)\n new_user.is_active = False\n new_user.save()\n Profile.objects.create(user=new_user, locale=locale)\n\n registration_profile = self.create_profile(new_user)\n\n self.send_confirmation_email(\n registration_profile,\n text_template,\n html_template,\n subject,\n email_data,\n **kwargs)\n\n if volunteer_interest:\n statsd.incr('user.registered-as-contributor')\n group = Group.objects.get(name=CONTRIBUTOR_GROUP)\n new_user.groups.add(group)\n\n return new_user", "def test_unexpired_account(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertFalse(profile.activation_key_expired())", "def test_user_activation(self):\n user = User.objects.get()\n response = self.client.get(reverse('accounts:user-activate',\n kwargs={'uidb64': urlsafe_base64_encode(force_bytes(user.pk)),\n 'token': account_activation_token.make_token(user)}))\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_valid_activation(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n user, activated = self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n\n self.assertIsInstance(user, UserModel())\n self.assertEqual(user.id, new_user.id)\n self.assertTrue(user.is_active)\n self.assertTrue(activated)\n\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertTrue(profile.activated)", "def 
activate_user(self, user_name):\n if not self._simultanious_log_ins:\n self._active_users_names.add(user_name)", "def activate_account_api():\n\n # get the data for this query\n data = request.get_json()\n if not data:\n response = jsonify({\n 'success': False,\n 'message': 'Missing request body'\n })\n response.status_code = 422\n return response\n\n # process arguments\n arg_email = data.get('email').strip().lower()\n\n # check if there is a user with this activation_link\n secret_link = data.get('secret_link')\n user = db.session.query(User).filter(\n User.activation_link == secret_link,\n ).one_or_none()\n if not user:\n response = jsonify({\n 'success': False,\n 'message': 'This activation link is no longer active. Contact your system administrator to receive a new one.'\n })\n response.status_code = 200\n return response\n\n # check if this user has already activated their account\n if user.activated:\n response = jsonify({\n 'success': False,\n 'message': 'This account has already been activated. Try forgot password to recover your password.'\n })\n response.status_code = 200\n return response\n\n # check if the correct email address was supplied\n if user.email != arg_email:\n response = jsonify({\n 'success': False,\n 'message': 'This is not the correct email for this activation link. Contact your system administrator to request a link for this email.'\n })\n response.status_code = 200\n return response\n\n # generate and set new password\n new_password = generate_password_hash(data.get('password'))\n user.password = new_password\n user.activated = True\n db.session.add(user)\n db.session.commit()\n\n # log that a user just activated their account\n _log('++ {} just activated their account'.format(user.email), '_signup')\n\n # return authenticated token\n token = generate_auth_token(user_id=user.user_id)\n response = jsonify({\n 'success': True,\n 'token': token\n })\n response.status_code = 200\n return response", "def activate(request, activation_key, template_name='registration/activate.html'):\n activation_key = activation_key.lower() # Normalize before trying anything with it.\n account = RegistrationProfile.objects.activate_user(activation_key)\n account.is_active = True\n account.save()\n return render(request, template_name,\n { 'account': account,\n 'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS })", "def test_active_account_and_expired_accountactivation_key_expired(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n profile.refresh_from_db()\n self.assertTrue(profile.activation_key_expired())", "def test_active_account_and_expired_accountactivation_key_expired(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n profile = self.registration_profile.objects.get(user=new_user)\n self.registration_profile.objects.activate_user(\n profile.activation_key, Site.objects.get_current())\n profile.refresh_from_db()\n 
self.assertTrue(profile.activation_key_expired())", "def suspend_acct(request):\r\n params = request.params\r\n user = request.user\r\n\r\n # we need to get the user from the email\r\n email = params.get('email', None)\r\n\r\n if email is None and hasattr(request, 'json_body'):\r\n # try the json body\r\n email = request.json_body.get('email', None)\r\n\r\n if user is None and email is None:\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'error': \"Please submit an email address\",\r\n })\r\n\r\n if user is None and email is not None:\r\n user = UserMgr.get(email=email)\r\n\r\n if user is None:\r\n request.response.status_int = 404\r\n return _api_response(request, {\r\n 'error': \"Please submit a valid address\",\r\n 'email': email\r\n })\r\n\r\n # check if we've already gotten an activation for this user\r\n if user.activation is not None:\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'error': \"\"\"You've already marked your account for reactivation.\r\nPlease check your email for the reactivation link. Make sure to\r\ncheck your spam folder.\"\"\",\r\n 'username': user.username,\r\n })\r\n\r\n # mark them for reactivation\r\n user.reactivate(u\"FORGOTTEN\")\r\n\r\n # log it\r\n AuthLog.reactivate(user.username)\r\n\r\n # and then send an email notification\r\n # @todo the email side of things\r\n settings = request.registry.settings\r\n msg = ReactivateMsg(user.email,\r\n \"Activate your Bookie account\",\r\n settings)\r\n\r\n msg.send({\r\n 'url': request.route_url(\r\n 'reset',\r\n username=user.username,\r\n reset_key=user.activation.code),\r\n 'username': user.username\r\n })\r\n\r\n return _api_response(request, {\r\n 'message': \"\"\"Your account has been marked for reactivation. 
Please\r\n check your email for instructions to reset your\r\n password\"\"\",\r\n })", "def activate_user(username, code, new_pass):\r\n\r\n qry = Activation.query.\\\r\n filter(Activation.code == code).\\\r\n filter(User.username == username)\r\n\r\n res = qry.first()\r\n\r\n if UserMgr.acceptable_password(new_pass) and res is not None:\r\n user = res.user\r\n user.activated = True\r\n user.password = new_pass\r\n res.activate()\r\n\r\n LOG.debug(dict(user))\r\n\r\n return True\r\n else:\r\n return None", "def test_new_user_not_my_project(self):\n setup_identity_cache()\n\n url = \"/v1/actions/InviteUser\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": \"test_project_id\",\n \"roles\": \"member\",\n \"username\": \"[email protected]\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n data = {\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": \"test_project_id\",\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def activate(self):\n if not self.is_active:\n self.is_active = True\n self.activated_at = datetime.datetime.utcnow()\n import messaging # avoid circular import\n messaging.send_activated_emails(self)\n self.save()", "def save(self, commit=True):\n user = super(InvitationCompleteForm, self).save(commit)\n\n def save_invited_user():\n invited_user = self.invited_user\n invited_user.created_user = user\n invited_user.status = InvitedUser.STATUS_REGISTERED\n invited_user.save()\n if commit:\n save_invited_user()\n else:\n self.save_invited_user = save_invited_user\n return user", "async def invite(self, ctx):\n invite = f\"https://discordapp.com/api/oauth2/authorize?client_id={self.bot.user.id}&permissions=67584&scope=bot\"\n await ctx.send(embed=discord.Embed(\n color=discord.colour.Colour.teal(),\n description=f\":mailbox_with_mail: [Invite]({invite}) me to your server!\"))", "def save(self, *args, **kwargs):\n if not self.require_confirm_email:\n User.objects.filter(is_active=False, deactivation_reason=\"pending\").update(\n is_active=True, deactivation_reason=None\n )\n if not self.invite_question_text:\n self.invite_question_text = \"What is your favourite book?\"\n super().save(*args, **kwargs)", "def accept_invite(self):\n url = API_PATH[\"accept_mod_invite\"].format(subreddit=self.subreddit)\n self.subreddit._reddit.post(url)", "def require_user( request ):\n\n db = get_db()\n\n if ( not 'users_id' in session ):\n return False;\n\n users_id = session[ 'users_id' ]\n\n user = db.execute( text( \"select users_id, name, email from users where users_id = :id and is_active\" ), id = users_id ).fetchone()\n\n if ( not user ):\n return False;\n\n return user", "def activate(request, activation_key):\n profile = get_object_or_404(User, activation_key=activation_key)\n if profile.akey_expires < timezone.now():\n return render('user_account/activate.html', {'expired': True})\n\n profile.save(update_fields=['active', 'activation_key'])\n return render(\n 'user_account/activate.html',\n {'success': True, 'name': profile.name + \" \" + profile.surname}\n )", "def accounts_invites_add(request):\r\n rdict = request.matchdict\r\n username = rdict.get('username', None)\r\n if username:\r\n username = username.lower()\r\n count = rdict.get('count', None)\r\n\r\n if username is not None and count is not None:\r\n user = UserMgr.get(username=username)\r\n\r\n if user:\r\n user.invite_ct = count\r\n return _api_response(request, 
dict(user))\r\n else:\r\n request.response.status_int = 404\r\n ret = {'error': \"Invalid user account.\"}\r\n return _api_response(request, ret)\r\n else:\r\n request.response.status_int = 400\r\n ret = {'error': \"Bad request, missing parameters\"}\r\n return _api_response(request, ret)", "def test_invited(self) -> None:\n\n self._perform_background_initial_update()\n\n u1 = self.register_user(\"u1\", \"pass\")\n u1token = self.login(\"u1\", \"pass\")\n r1 = self.helper.create_room_as(u1, tok=u1token)\n\n u2 = self.register_user(\"u2\", \"pass\")\n\n r1stats_ante = self._get_current_stats(\"room\", r1)\n assert r1stats_ante is not None\n\n self.helper.invite(r1, u1, u2, tok=u1token)\n\n r1stats_post = self._get_current_stats(\"room\", r1)\n assert r1stats_post is not None\n\n self.assertEqual(\n r1stats_post[\"current_state_events\"] - r1stats_ante[\"current_state_events\"],\n 1,\n )\n self.assertEqual(\n r1stats_post[\"invited_members\"] - r1stats_ante[\"invited_members\"], +1\n )", "def test_activate_user(self):\n activated_user = (RegistrationProfile.objects\n .activate_user(self.activation_key))\n self.assertTrue(activated_user.registrationprofile.activated)\n self.assertFalse(activated_user.is_active)", "def test_resend_activation_email(self):\n user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), send_email=False, **self.user_info)\n self.assertEqual(len(mail.outbox), 0)\n\n profile = self.registration_profile.objects.get(user=user)\n orig_activation_key = profile.activation_key\n\n self.assertTrue(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n\n profile = self.registration_profile.objects.get(pk=profile.pk)\n new_activation_key = profile.activation_key\n\n self.assertNotEqual(orig_activation_key, new_activation_key)\n self.assertEqual(len(mail.outbox), 1)", "def get_available_invitees(self):\n return User.objects.exclude(pk=self.request.user.pk)", "def test_resend_activation_email_nonexistent_user(self):\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)", "def test_resend_inactive(self):\n self.invite.active = False\n self.invite.save()\n url = reverse(\n 'projectroles:api_invite_resend',\n kwargs={'projectinvite': self.invite.sodar_uuid},\n )\n response = self.request_knox(url, method='POST')\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(len(mail.outbox), 0)", "async def invite(self, ctx):\r\n myInvite = discord.utils.oauth_url(self.bot.user.id, permissions=discord.Permissions(permissions=8))\r\n await ctx.channel.send('Invite me to *your* server with this link: \\n\\n<{}>'.format(myInvite))", "def signup_active(request, uidb36=None, token=None,\n post_activation_redirect=None,\n token_generator=default_token_generator,\n domain_override=None, use_https=False):\n assert uidb36 is not None and token is not None\n if post_activation_redirect is None:\n post_activation_redirect = reverse('amscms.core.views.signup_active_done')\n try:\n uid_int = base36_to_int(uidb36)\n user = User.objects.get(id=uid_int)\n except (ValueError, User.DoesNotExists):\n user = None\n \n if user is not None and token_generator.check_token(user, token):\n user.is_active = True\n user.save()\n \"\"\"\n Sends successful email to the user. 
\n \"\"\"\n if not domain_override:\n current_site = Site.objects.get_current()\n site_name = current_site.name\n domain = current_site.domain\n else:\n site_name = domain = domain_override\n c = {\n 'subject': _(u\"Registration was successful on %(site_name)s\" % {'site_name': site_name, }),\n 'site_name': site_name,\n 'user': user,\n 'domain': domain,\n 'protocol': use_https and 'https' or 'http',\n 'login_url': reverse('django.contrib.auth.views.login'),\n }\n send_email(user.email, c, settings.DEFAULT_FROM_EMAIL,\n \"registration/signup_email_activated.txt\",\n \"registration/signup_email_activated.html\")\n \n else:\n messages.error(request, _(u\"Invalid activation link, you may already activated, try to login. \"))\n return HttpResponseRedirect(\"/\")\n return HttpResponseRedirect(post_activation_redirect)", "async def create_user_open(\n *,\n user_in: schemas.UnprivilegedUserCreate,\n db: Session = Depends(deps.get_db),\n redis: aioredis.Redis = Depends(deps.get_redis),\n) -> Any:\n if not settings.USERS_OPEN_REGISTRATION:\n raise HTTPException(\n status_code=403,\n detail=\"Open user registration is forbidden on this server\",\n )\n user = crud.user.get_by_email(db, email=user_in.email)\n if user is not None:\n raise HTTPException(\n status_code=400,\n detail=\"The user with this username already exists in the system\",\n )\n user_in = schemas.UserCreate(user_in.dict(exclude_unset=True))\n user = await crud.user_cachedb.create(db, redis, obj_in=user_in)\n if settings.EMAILS_ENABLED and user_in.email:\n send_new_account_email(\n email_to=user_in.email, username=user_in.email, password=user_in.password\n )", "async def invite(ctx):\n permissions = 2134207679\n url = discord.utils.oauth_url(client_id=bot.user.id, permissions=discord.Permissions(permissions=permissions),\n scopes=(\"bot\", \"applications.commands\"))\n view = discord.ui.View()\n view.add_item(discord.ui.Button(label=\"Invite\", url=url))\n await ctx.respond(\"I'm glad you want to add me to your server, here's a link!\", view=view)", "def test_admin_approval_nonexistent_id(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n profile = self.registration_profile.objects.get(user=new_user)\n\n user = self.registration_profile.objects.admin_approve_user(\n profile.id, Site.objects.get_current())\n self.assertIs(user, False)", "def make_user_active(self, user_id, verify_code):\n\n try:\n user = self.get(id=user_id)\n if not user.verify_code or user.verify_code != verify_code or user.verify_code_expire < timezone.now():\n raise Exception('Verification code is invalid or expired.')\n\n # Verification code shouldn't be used again\n user.verify_code = None\n user.is_verified = True\n user.is_active = True\n user.save()\n\n except get_user_model().DoesNotExist:\n raise Exception('Password reset code is invalid or expired.')", "def accounts_inactive(request):\r\n user_list = UserMgr.get_list(active=False)\r\n ret = {\r\n 'count': len(user_list),\r\n 'users': [dict(h) for h in user_list],\r\n }\r\n return _api_response(request, ret)", "def enable_user(request):\n user_id = request.POST.get('user_id')\n if user_id is None:\n response = {'status': -1, 'status_message': 'No user with id {} exists'.format(user_id)}\n return HttpResponse(json.dumps(response))\n try:\n user_obj = User.objects.get(id=user_id)\n except User.DoesNotExist:\n response = {'status': -1, 'status_message': 'No user with id {} exists'.format(user_id)}\n return 
HttpResponse(json.dumps(response))\n user_obj.is_active = True\n user_obj.save()\n response = {'status': 1, 'status_message': 'Success'}\n return HttpResponse(json.dumps(response))", "def save(self):\n new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],\n password=self.cleaned_data['password1'],\n email=self.cleaned_data['email'],\n firstname=self.cleaned_data['first_name'],\n lastname=self.cleaned_data['last_name'],\n agree=self.cleaned_data['tos'])\n return new_user", "def confirm_login_allowed(self, user):\n if not user.is_active:\n raise ValidationError(\n self.error_messages['inactive'],\n code='inactive',\n )", "def test_expired_account(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n new_user.date_joined -= datetime.timedelta(\n days=settings.ACCOUNT_ACTIVATION_DAYS + 1)\n new_user.save()\n profile = self.registration_profile.objects.get(user=new_user)\n self.assertTrue(profile.activation_key_expired())", "def test_user_invite_cant_edit_users(self):\n project = fake_clients.FakeProject(name=\"test_project\")\n\n setup_identity_cache(projects=[project])\n\n url = \"/v1/actions/InviteUser\"\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": project.id,\n \"roles\": \"project_admin,member,project_mod\",\n \"username\": \"user\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n data = {\n \"username\": \"new_user\",\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": project.id,\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.json(), {\"errors\": [\"actions invalid\"]})", "def test_new_user_not_authenticated(self):\n\n setup_identity_cache()\n\n url = \"/v1/actions/InviteUser\"\n headers = {}\n data = {\n \"email\": \"[email protected]\",\n \"roles\": [\"member\"],\n \"project_id\": \"test_project_id\",\n }\n response = self.client.post(url, data, format=\"json\", headers=headers)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n self.assertEqual(\n response.json(), {\"errors\": [\"Credentials incorrect or none given.\"]}\n )", "def activate_user(request, uidb64, token):\n activation_session_token = '_activation_reset_token'\n activation_url_token = 'user-activation'\n title = \"Account activation\"\n context = {'title': 'Invalid Activation Link', 'isvalid': False}\n\n try:\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n except (TypeError, ValueError, OverflowError, User.DoesNotExist):\n user = None\n\n if user and user.is_active:\n messages.success(request, 'The account is active.')\n return redirect('login')\n\n if request.method == 'GET':\n if token == activation_url_token:\n session_token = request.session.get(activation_session_token)\n if default_token_generator.check_token(user, session_token):\n # If the token is valid, display the password reset form.\n form = forms.ActivationForm(user=user)\n return render(request, 'user/activate_user.html', {\n 'form': form, 'title': title})\n else:\n if default_token_generator.check_token(user, token):\n # Store the token in the session and redirect to the\n # password reset form at a URL without the token. 
That\n # avoids the possibility of leaking the token in the\n # HTTP Referer header.\n request.session[activation_session_token] = token\n redirect_url = request.path.replace(token, activation_url_token)\n return HttpResponseRedirect(redirect_url)\n else:\n if token == activation_url_token:\n session_token = request.session.get(activation_session_token)\n form = forms.ActivationForm(user=user, data=request.POST)\n if form.is_valid() and default_token_generator.check_token(user, session_token):\n with transaction.atomic():\n user.set_password(form.cleaned_data['password1'])\n user.is_active = True\n # Check legacy credentials\n check_legacy_credentials(user, user.email)\n user.save()\n email = user.associated_emails.first()\n email.verification_date = timezone.now()\n email.is_verified = True\n email.save()\n request.session.pop(activation_session_token)\n logger.info('User activated - {0}'.format(user.email))\n messages.success(request, 'The account has been activated.')\n login(request, user)\n return redirect('project_home')\n return render(request, 'user/activate_user.html', {'form': form,\n 'title': title})\n\n return render(request, 'user/activate_user_complete.html', context)" ]
[ "0.73747605", "0.68626577", "0.68586427", "0.6745645", "0.6459462", "0.6449257", "0.6446634", "0.6362106", "0.6320504", "0.6254038", "0.62451524", "0.6238193", "0.6215016", "0.62053627", "0.61985564", "0.61859196", "0.61620253", "0.61389893", "0.61187315", "0.61107427", "0.61107427", "0.61018246", "0.60605484", "0.6050716", "0.60448813", "0.6036829", "0.60333234", "0.60129374", "0.6003461", "0.6001646", "0.59547395", "0.5946948", "0.59078723", "0.5895275", "0.5878743", "0.5873162", "0.5872253", "0.58681977", "0.58611816", "0.5827892", "0.58242226", "0.5823477", "0.58015746", "0.57811195", "0.5774034", "0.5746121", "0.574305", "0.5741208", "0.5736394", "0.57337445", "0.57196975", "0.5710605", "0.57073027", "0.5694365", "0.56941986", "0.565832", "0.56372344", "0.56352395", "0.5634473", "0.56211823", "0.5602924", "0.5600133", "0.55875725", "0.55875224", "0.55823356", "0.5577903", "0.5572114", "0.55690795", "0.5568337", "0.5564486", "0.55574876", "0.55548704", "0.554858", "0.5501794", "0.5500371", "0.5490089", "0.5487811", "0.5486824", "0.5480657", "0.5474914", "0.5471583", "0.54607195", "0.5460507", "0.5452475", "0.5445649", "0.5437847", "0.543675", "0.5423425", "0.5422663", "0.5420531", "0.5410559", "0.53998804", "0.5395488", "0.53916806", "0.5385157", "0.53842515", "0.53827566", "0.5380626", "0.5371794", "0.5371378" ]
0.6778383
3